In this HackerEarth "Palindromic changes" problem solution, you are given the costs of M changes, each converting a lowercase character x to a lowercase character y (and back). You are also given a string S.

You can change a character in string S to another lowercase character by spending the cost you were given earlier. Determine the minimum cost that is required to make S a palindrome.

You can assume that it is always possible to make S a palindrome.


HackerEarth Palindromic changes problem solution


First solution

#include <algorithm>
#include <bitset>
#include <cassert>
#include <chrono>
#include <complex>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <ctime>
#include <deque>
#include <functional>
#include <iomanip>
#include <iostream>
#include <iterator>
#include <limits>
#include <list>
#include <map>
#include <numeric>
#include <queue>
#include <random>
#include <ratio>
#include <set>
#include <sstream>
#include <stack>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
#include <climits>
// Competitive-programming shorthand. NOTE: `#define int ll` rewrites every
// later `int` token in this file to `long long` (which is why main below is
// declared `signed`), and the single-letter macros `f`/`s`/`in` silently
// rename any identifier spelled that way — keep that in mind when editing.
#define ll long long
#define ld long double
#define mp make_pair
#define pb push_back
#define in insert
#define vll vector<ll>
#define endl "\n"
#define pll pair<ll,ll>
#define f first
#define s second
#define FOR(i,a,b) for(int i=(a),_b=(b); i<=_b; i++)
#define int ll
#define sz(x) (ll)x.size()
// Fixed: the previous expansion `(x.begin(),x.end())` parenthesized both
// iterators, collapsing them into a single comma-expression argument, so
// e.g. sort(all(v)) would not compile. The unparenthesized pair is the
// standard idiom for this macro.
#define all(x) x.begin(),x.end()
using namespace std;


// Sentinel for "no known conversion"; large enough that INF + INF still
// fits comfortably in a long long.
const long long INF = 1e12;
const long long N = (250002); // TODO : change value as per problem
const long long MOD = 1e9 + 7;
// dis[a][b]: cheapest cost to convert letter 'a'+a into letter 'a'+b,
// closed under multi-step conversions after Floyd-Warshall.
long long dis[26][26];

// Reads string S and M undirected conversion costs from stdin, then prints
// the minimum total cost of turning S into a palindrome.
//
// For each mirrored pair (S[i], S[n-1-i]) the cheapest option is to convert
// BOTH characters to some common target letter c; we try all 26 targets.
void solve() {
    std::string str;
    std::cin >> str;
    long long m;
    std::cin >> m;

    // Converting a letter to itself is free; everything else starts
    // unreachable.
    for (long long i = 0; i < 26; i++) {
        for (long long j = 0; j < 26; j++) {
            dis[i][j] = INF;
        }
        dis[i][i] = 0;
    }

    // Read the M edges. BUG FIX: take the minimum instead of blindly
    // overwriting, so a duplicate (x, y) pair with a larger cost cannot
    // clobber a cheaper cost read earlier.
    for (long long i = 1; i <= m; i++) {
        char x, y;
        long long c;
        std::cin >> x >> y >> c;
        long long u = x - 'a', v = y - 'a';
        dis[u][v] = std::min(dis[u][v], c);
        dis[v][u] = std::min(dis[v][u], c);
    }

    // Floyd-Warshall: all-pairs cheapest conversion over the 26-letter
    // graph. The INF guard keeps unreachable sentinels from accumulating.
    for (long long k = 0; k < 26; ++k) {
        for (long long i = 0; i < 26; ++i) {
            for (long long j = 0; j < 26; ++j) {
                if (dis[i][k] < INF && dis[k][j] < INF)
                    dis[i][j] = std::min(dis[i][j], dis[i][k] + dis[k][j]);
            }
        }
    }

    long long ans = 0;
    long long n = (long long)str.size();
    for (long long i = 0; i < n / 2; i++) {
        long long best = INF;
        // Try every common target letter for the mirrored pair.
        for (char c = 'a'; c <= 'z'; c++) {
            best = std::min(best, dis[c - 'a'][str[i] - 'a'] +
                                      dis[c - 'a'][str[n - i - 1] - 'a']);
        }
        ans += best;
    }
    std::cout << ans << '\n';
}
// Entry point: set up fast unsynchronized I/O, then run the test cases.
// Declared `signed` because the `#define int ll` macro above would
// otherwise mangle main's return type.
signed main() {
    std::ios_base::sync_with_stdio(false);
    std::cin.tie(nullptr);
    // freopen(".in","r",stdin);freopen(".out","w",stdout);

    long long tt = 1;          // single test case by default
    // std::cin >> tt;
    for (; tt > 0; --tt) {
        solve();
    }
    return 0;
}

Second solution

#include <bits/stdc++.h>
using namespace std;
typedef long long ll; // the summed answer can exceed 32-bit range

const int z = 26;     // alphabet size (lowercase letters only)
int d[z][z];          // d[a][b]: min cost to convert letter a into letter b
int main(){
string s;
cin >> s;
memset(d, 63, sizeof d);
for(int i = 0; i < z; i++)
d[i][i] = 0;
int t;
cin >> t;
while(t--){
char v, u;
int w;
cin >> v >> u >> w;
v -= 'a', u -= 'a';
d[u][v] = d[v][u] = min(d[v][u], w);
}
for(int k = 0; k < z; k++)
for(int i = 0; i < z; i++)
for(int j = 0; j < z; j++)
d[i][j] = min(d[i][j], d[i][k] + d[k][j]);
ll ans = 0;
for(int i = 0; i < s.size() / 2; i++){
int cur = INT_MAX;
for(int c = 0; c < z; c++)
cur = min(cur, d[s[i] - 'a'][c] + d[s[s.size() - i - 1] - 'a'][c]);
assert(cur < INT_MAX);
ans += cur;
}
cout << ans << '\n';
}