
HackerEarth XOR in Tree problem solution

In this HackerEarth XOR in Tree problem: in graph theory, a tree is a simple connected acyclic graph. You are given a tree consisting of N nodes numbered from 1 to N. Each node i has a value Ai associated with it. Your task is to write a program that performs Q operations, each of which is one of the following two types (a brute-force sketch of their meaning follows the list):
  1. u, v: Assign the value v to Au.
  2. u, v: Calculate the XOR sum of the values associated with the nodes on the unique path from u to v.
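
The sketch below only illustrates what the two operation types mean; the 5-node tree, the node values, and the sample operations in it are made up for illustration and are not part of the problem's test data. Both actual solutions further down answer each operation in roughly O(log N) instead of walking the whole path.

#include <bits/stdc++.h>
using namespace std;

// Brute-force illustration only: a type-2 query is answered by walking the
// unique path with a DFS, and a type-1 update is a plain assignment.
vector<long long> a;            // a[1..n], 1-indexed node values
vector<vector<int>> adj;        // adjacency lists

bool path_dfs(int u, int parent, int target, long long &acc) {
    acc ^= a[u];
    if (u == target) return true;
    for (int w : adj[u])
        if (w != parent && path_dfs(w, u, target, acc)) return true;
    acc ^= a[u];                // backtrack: u is not on the path
    return false;
}

int main() {
    int n = 5;
    a = {0, 3, 5, 2, 7, 4};     // A1..A5 = 3 5 2 7 4 (index 0 unused)
    adj.assign(n + 1, vector<int>());
    int es[4][2] = {{1, 2}, {1, 3}, {2, 4}, {2, 5}};
    for (auto &e : es) { adj[e[0]].push_back(e[1]); adj[e[1]].push_back(e[0]); }

    long long acc = 0;
    path_dfs(4, 0, 3, acc);
    printf("%lld\n", acc);      // type 2 on (4, 3): 7 ^ 5 ^ 3 ^ 2 = 3

    a[2] = 6;                   // type 1 on (2, 6): assign A2 := 6
    acc = 0;
    path_dfs(4, 0, 3, acc);
    printf("%lld\n", acc);      // same query now gives 7 ^ 6 ^ 3 ^ 2 = 0
}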


First solution

The idea: root the tree at node 1, precompute the XOR on every root-to-node path plus a binary-lifting LCA table, and keep a lazy segment tree over DFS positions so that changing Au to v XORs the correction (old Au) ^ v into the whole subtree of u with one range update.

#include <bits/stdc++.h>

using namespace std;

const int MAXN = 100000 + 10;
const int MAXL = 22;

vector<int> adj[MAXN];
int a[MAXN], subtree[MAXN], pos[MAXN], path_xor[MAXN], depth[MAXN];
int par[MAXN][MAXL];
bool visited[MAXN];
int n, m, l;

// Recursive DFS from the root: computes subtree sizes, depths, the XOR of
// values on the root-to-node path, and immediate parents, and assigns each
// node a position (in reverse finishing order) so that every subtree
// occupies the contiguous range pos[u] .. pos[u] + subtree[u] - 1.
void DFS(int u) {
    visited[u] = true;
    subtree[u] = 1;
    for (int i = 0; i < (int)adj[u].size(); i++) {
        int v = adj[u][i];
        if (!visited[v]) {
            path_xor[v] = path_xor[u] ^ a[v];
            depth[v] = depth[u] + 1;
            DFS(v);
            subtree[u] += subtree[v];
            par[v][0] = u;
        }
    }
    pos[u] = m;
    m--;
}

// Reads one test case, builds the adjacency lists, runs the DFS and
// precomputes the binary-lifting table used for LCA queries.
void init() {
    scanf("%d\n", &n);
    assert((1 <= n) && (n <= 100000));
    for (int i = 1; i <= n; i++) {
        scanf("%d", &a[i]);
        assert((1 <= a[i]) && (a[i] <= (int)(1e9)));
        adj[i].clear();
    }
    for (int i = 1; i <= n - 1; i++) {
        int u, v;
        scanf("%d %d\n", &u, &v);
        assert((1 <= u) && (u <= n));
        assert((1 <= v) && (v <= n));
        adj[u].push_back(v);
        adj[v].push_back(u);
    }
    for (int i = 1; i <= n; i++) visited[i] = false;
    m = n;
    path_xor[1] = a[1];
    depth[1] = 0;
    DFS(1);
    // Binary lifting: par[i][j] is the 2^j-th ancestor of i.
    l = (int)(log(n) / log(2)) + 1;
    for (int j = 1; j <= l; j++)
        for (int i = 1; i <= n; i++)
            par[i][j] = par[par[i][j - 1]][j - 1];
}

// Segment tree over the DFS positions supporting range-XOR updates and
// point queries; the pending value stored at a node is simply XORed down.
struct segment_tree {
    int T[8 * MAXN];
    int n;

    void init(int _n) {
        n = _n;
        for (int i = 0; i <= 8 * n; i++) T[i] = 0;
    }

    // Push the pending XOR value of an internal node to its children.
    void lazy_propagation(int node, int from, int to) {
        if (from < to) {
            T[node * 2] ^= T[node];
            T[node * 2 + 1] ^= T[node];
            T[node] = 0;
        }
    }

    // XOR val into every position in [l, r].
    void update(int node, int from, int to, int l, int r, int val) {
        lazy_propagation(node, from, to);
        if ((from > r) || (to < l)) return;
        if ((from >= l) && (to <= r)) {
            T[node] ^= val;
            return;
        }
        int mid = (from + to) / 2;
        update(node * 2, from, mid, l, r, val);
        update(node * 2 + 1, mid + 1, to, l, r, val);
    }

    // Read the accumulated XOR at a single position.
    void get_val(int node, int from, int to, int pos, int &result) {
        lazy_propagation(node, from, to);
        if ((from > pos) || (to < pos)) return;
        if (from == to) {
            result = T[node];
            return;
        }
        int mid = (from + to) / 2;
        get_val(node * 2, from, mid, pos, result);
        get_val(node * 2 + 1, mid + 1, to, pos, result);
    }

    void xor_range(int l, int r, int val) {
        update(1, 1, n, l, r, val);
    }

    int get_xor(int x) {
        int res = 0;
        get_val(1, 1, n, x, res);
        return res;
    }
} ST;

// Move u up by exactly `height` edges using the binary-lifting table.
void jump(int &u, int height) {
    for (int i = l; i >= 0; i--) {
        if (height >= (1 << i)) {
            u = par[u][i];
            height -= (1 << i);
        }
    }
}

// Standard binary-lifting LCA: equalize depths, then lift both nodes
// together until their parents coincide.
int lca(int u, int v) {
    if (depth[u] > depth[v]) jump(u, depth[u] - depth[v]);
    else jump(v, depth[v] - depth[u]);
    if (u == v) return u;
    for (int i = l; i >= 0; i--) {
        if (par[u][i] != par[v][i]) {
            u = par[u][i];
            v = par[v][i];
        }
    }
    return par[u][0];
}

int main()
{
    int test_cases;
    cin >> test_cases;
    while (test_cases--) {
        init();
        int q;
        scanf("%d\n", &q);
        assert((1 <= q) && (q <= 100000));
        ST.init(n);
        for (int i = 1; i <= q; i++) {
            int t, u, v;
            scanf("%d %d %d\n", &t, &u, &v);
            assert((1 <= t) && (t <= 2));
            assert((1 <= u) && (u <= n));
            if (t == 1) {
                assert((1 <= v) && (v <= (int)(1e9)));
                // Changing a[u] changes the root-to-x XOR of every node x in
                // u's subtree by a[u] ^ v, so apply one range-XOR update.
                ST.xor_range(pos[u], pos[u] + subtree[u] - 1, a[u] ^ v);
                a[u] = v;
            } else {
                assert((1 <= v) && (v <= n));
                // XOR on the path u-v = rootXOR(u) ^ rootXOR(v) ^ a[lca],
                // where rootXOR combines the initial prefix path_xor with
                // the deltas accumulated in the segment tree.
                int c = lca(u, v);
                int res = path_xor[u] ^ path_xor[v] ^ ST.get_xor(pos[u]) ^ ST.get_xor(pos[v]) ^ a[c];
                printf("%d\n", res);
            }
        }
    }
}

Second solution

This version uses a signed Euler tour with a Fenwick tree for prefix XORs and a Schieber-Vishkin structure for constant-time LCA queries.

#include <string>
#include <vector>
#include <algorithm>
#include <numeric>
#include <set>
#include <map>
#include <queue>
#include <iostream>
#include <sstream>
#include <cstdio>
#include <cmath>
#include <ctime>
#include <cstring>
#include <cctype>
#include <cassert>
#include <limits>
#include <functional>
#define rep(i,n) for(int (i)=0;(i)<(int)(n);++(i))
#define rer(i,l,u) for(int (i)=(int)(l);(i)<=(int)(u);++(i))
#define reu(i,l,u) for(int (i)=(int)(l);(i)<(int)(u);++(i))
#if defined(_MSC_VER) || __cplusplus > 199711L
#define aut(r,v) auto r = (v)
#else
#define aut(r,v) __typeof(v) r = (v)
#endif
#define each(it,o) for(aut(it, (o).begin()); it != (o).end(); ++ it)
#define all(o) (o).begin(), (o).end()
#define pb(x) push_back(x)
#define mp(x,y) make_pair((x),(y))
#define mset(m,v) memset(m,v,sizeof(m))
#define INF 0x3f3f3f3f
#define INFL 0x3f3f3f3f3f3f3f3fLL
using namespace std;
typedef vector<int> vi; typedef pair<int,int> pii; typedef vector<pair<int,int> > vpii; typedef long long ll;
template<typename T, typename U> inline void amin(T &x, U y) { if(y < x) x = y; }
template<typename T, typename U> inline void amax(T &x, U y) { if(x < y) x = y; }

struct To {
typedef int Vertex;
Vertex to;
To() { }
To(Vertex to_): to(to_) { }
};

template<typename To_>
struct StaticGraph {
typedef To_ To;
typedef typename To::Vertex Vertex;
typedef std::pair<Vertex,To> Edge;
typedef const To *const_iterator;

void init(int n_, const std::vector<Edge> &edges) {
n = n_; int m = edges.size();
tos.resize(m+1); offsets.resize(n+1);
for(int e = 0; e < m; e ++) offsets[edges[e].first] ++;
for(int v = 1; v <= n_; v ++) offsets[v] += offsets[v-1];
for(int e = 0; e < m; e ++)
tos[-- offsets[edges[e].first]] = edges[e].second;
}
int numVertices() const { return n; }
int numEdges() const { return tos.size() - 1; }

inline const_iterator edgesBegin(int v) const { return &tos[offsets[v]]; }
inline const_iterator edgesEnd(int v) const { return &tos[offsets[v+1]]; }
private:
int n;
std::vector<To> tos;
std::vector<int> offsets;
};


// Schieber-Vishkin LCA: O(N) preprocessing, O(1) per query, based on bit
// tricks over a preorder numbering of the tree.
class SchieberVishkinLCA {
public:
typedef StaticGraph<To> Graph;
typedef unsigned Word;
typedef Graph::Vertex Vertex;
private:

static inline Word lowestOneBit(Word v) {
return v & (~v+1);
}
static inline Word highestOneBitMask(Word v) {
v |= v >> 1;
v |= v >> 2;
v |= v >> 4;
v |= v >> 8;
v |= v >> 16;
return v >> 1;
}

std::vector<Word> indices;
std::vector<Word> maxHIndices;
std::vector<Word> ancestorHeights;
std::vector<Vertex> pathParents;
public:
void build(const Graph &g, Vertex root) {
return build(g, std::vector<Vertex>(1, root));
}
void build(const Graph &g, const std::vector<Vertex> &roots) {
int N = g.numVertices();

ancestorHeights.resize(N);
std::vector<Vertex> parents(N);
maxHIndices.resize(N);
std::vector<Vertex> vertices; vertices.reserve(N);
indices.resize(N);

Word currentIndex = 1;
for(int ri = 0; ri < (int)roots.size(); ri ++) {
Vertex root = roots[ri];
parents[root] = root;
vertices.push_back(root);
}
while(!vertices.empty()) {
Vertex v = vertices.back(); vertices.pop_back();
indices[v] = currentIndex ++;
for(const Graph::To *it = g.edgesBegin(v); it != g.edgesEnd(v); ++ it) {
parents[it->to] = v;
vertices.push_back(it->to);
}
}

int head = 0, tail = roots.size();
vertices.resize(N);
for(int ri = 0; ri < (int)roots.size(); ri ++)
vertices[ri] = roots[ri];
while(head != tail) {
Vertex v = vertices[head ++];
for(const Graph::To *it = g.edgesBegin(v); it != g.edgesEnd(v); ++ it)
vertices[tail ++] = it->to;
}

for(std::vector<Vertex>::const_iterator it = vertices.begin(); it != vertices.end(); ++ it)
maxHIndices[*it] = indices[*it];
for(std::vector<Vertex>::const_reverse_iterator it = vertices.rbegin(); it != vertices.rend(); ++ it) {
Vertex v = *it, parent = parents[v];
if(lowestOneBit(maxHIndices[parent]) < lowestOneBit(maxHIndices[v]))
maxHIndices[parent] = maxHIndices[v];
}

for(int ri = 0; ri < (int)roots.size(); ri ++) {
Vertex root = roots[ri];
ancestorHeights[root] = 0;
}
for(std::vector<Vertex>::const_iterator it = vertices.begin(); it != vertices.end(); ++ it) {
Vertex v = *it;
ancestorHeights[v] = ancestorHeights[parents[v]] | lowestOneBit(maxHIndices[v]);
}

pathParents.swap(parents);
for(int ri = 0; ri < (int)roots.size(); ri ++) {
Vertex root = roots[ri];
pathParents[indices[root]-1] = root;
}
for(std::vector<Vertex>::const_iterator it = vertices.begin(); it != vertices.end(); ++ it) {
Vertex v = *it;
for(const Graph::To *jt = g.edgesBegin(v); jt != g.edgesEnd(v); ++ jt) {
if(maxHIndices[v] != maxHIndices[jt->to])
pathParents[indices[jt->to]-1] = v;
else
pathParents[indices[jt->to]-1] = pathParents[indices[v]-1];
}
}
}

Vertex query(Vertex v, Vertex u) const {
Word Iv = maxHIndices[v], Iu = maxHIndices[u];
Word hIv = lowestOneBit(Iv), hIu = lowestOneBit(Iu);
Word hbMask = highestOneBitMask((Iv ^ Iu) | hIv | hIu);
Word j = lowestOneBit(ancestorHeights[v] & ancestorHeights[u] & ~hbMask);
Vertex x, y;
if(j == hIv) x = v;
else {
Word kMask = highestOneBitMask(ancestorHeights[v] & (j-1));
x = pathParents[(indices[v] & ~kMask | (kMask+1))-1];
}
if(j == hIu) y = u;
else {
Word kMask = highestOneBitMask(ancestorHeights[u] & (j-1));
y = pathParents[(indices[u] & ~kMask | (kMask+1))-1];
}
return indices[x] < indices[y] ? x : y;
}
};



vector<int> t_parent;
vi t_seq; vector<bool> t_sign;
vector<int> t_left, t_right;

// Builds a signed Euler tour: node i occupies an entry position t_left[i]
// and an exit position t_right[i] - 1, so a value XORed in at both
// positions contributes to a prefix XOR exactly when the prefix ends
// inside i's subtree, i.e. prefix XORs give root-to-node path XORs.
void tree_signedeulertour(const vector<vi> &g, int root) {
int n = g.size();
t_parent.assign(n, -1);
t_left.assign(n, -1);
t_right.assign(n, -1);
t_seq.assign(n * 2, -1);
t_sign.assign(n * 2, false);
int pos = 0;

vector<int> stk; stk.push_back(root);
while(!stk.empty()) {
int i = stk.back(); stk.pop_back();
if(i < 0) {
i = -i - 1;
t_seq[pos] = i;
t_sign[pos] = true;
pos ++;
t_right[i] = pos;
continue;
}
t_left[i] = pos;
t_seq[pos] = i;
t_sign[pos] = false;
pos ++;

stk.push_back(-(i+1));
for(int j = (int)g[i].size()-1; j >= 0; j --) {
int c = g[i][j];
if(t_parent[c] == -1 && c != root)
stk.push_back(c);
else
t_parent[i] = c;
}
}
}

// Fenwick tree under XOR: point update, prefix XOR sum(i) over [0, i),
// and range XOR sum(left, right) over [left, right).
struct FenwickTreeXor {
typedef int T;
vector<T> v;
void init(int n) { v.assign(n, 0); }
void add(int i, T x) {
for(; i < (int)v.size(); i |= i+1) v[i] ^= x;
}
T sum(int i) const {
T r = 0;
for(-- i; i >= 0; i = (i & (i+1)) - 1) r ^= v[i];
return r;
}
T sum(int left, int right) const {
return sum(right) ^ sum(left);
}
};


int main() {
typedef SchieberVishkinLCA::Graph Graph;

int T;
scanf("%d", &T);
assert(1 <= T && T <= 10);
rep(ii, T) {
int N;
scanf("%d", &N);
assert(1 <= N && N <= 100000);
vector<int> A(N);
rep(i, N) {
scanf("%d", &A[i]);
assert(1 <= A[i] && A[i] <= 1000000000);
}
vector<vi> g(N);
rep(i, N-1) {
int u, v;
scanf("%d%d", &u, &v), -- u, -- v;
assert(0 <= u && u < N && 0 <= v && v < N);
g[u].push_back(v);
g[v].push_back(u);
}
tree_signedeulertour(g, 0);

vector<Graph::Edge> edges;
reu(i, 1, N)
edges.push_back(Graph::Edge(t_parent[i], i));
Graph gg; gg.init(N, edges);
SchieberVishkinLCA lca; lca.build(gg, 0);

FenwickTreeXor ft;
ft.init(t_seq.size());
rep(i, t_seq.size())
ft.add(i, A[t_seq[i]]);

int Q;
scanf("%d", &Q);
assert(1 <= Q && Q <= 100000);
rep(jj, Q) {
int t;
scanf("%d", &t);
assert(1 <= t && t <= 2);
if(t == 1) {
int u, v;
scanf("%d%d", &u, &v), -- u;
assert(0 <= u && u < N);
assert(1 <= v && v <= 1000000000);
// Toggling A[u] at both its entry and exit positions keeps every
// root-to-node prefix XOR consistent for nodes in u's subtree.
ft.add(t_left[u], A[u] ^ v);
ft.add(t_right[u] - 1, A[u] ^ v);
A[u] = v;
}else if(t == 2) {
int u, v;
scanf("%d%d", &u, &v), -- u, -- v;
assert(0 <= u && u < N && 0 <= v && v < N);
int w = lca.query(u, v);
int ans = 0;
// Prefix XORs over the signed Euler tour equal root-to-node path XORs,
// so the two ranges below combine into the XOR along the path u-v.
ans ^= ft.sum(t_left[w], t_left[u] + 1);
ans ^= ft.sum(t_left[w] + 1, t_left[v] + 1);
printf("%d\n", ans);
}
}
}
return 0;
}

