tkmst201's Library

This documentation is automatically generated by online-judge-tools/verification-helper

View the Project on GitHub tkmst201/Library

:heavy_check_mark: Test/HeavyLightDecomposition_Query.SubtreeQuery.test.cpp

Depends on

DataStructure/SegmentTree.hpp
GraphTheory/HeavyLightDecomposition.hpp
GraphTheory/HeavyLightDecomposition_Query.hpp

Code

#define PROBLEM "https://judge.yosupo.jp/problem/vertex_add_subtree_sum"
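// vertex_add_subtree_sum: each "0 u x" query adds x to the value of vertex u,
// each "1 u" query prints the sum of the values over the subtree of u.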

#include "GraphTheory/HeavyLightDecomposition_Query.hpp"

#include <cstdio>
#include <vector>

int main() {
	int N, Q;
	scanf("%d %d", &N, &Q);
	
	std::vector<int> A(N);
	for (int i = 0; i < N; ++i) scanf("%d", &A[i]);
	
	using ll = long long;
	using HLD = HeavyLightDecomposition_Query<ll>;
	HLD::Graph g(N);
	for (int i = 1; i < N; ++i) {
		int p;
		scanf("%d", &p);
		g[p].emplace_back(i);
	}
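	// Initial vertex values A, identity 0, and + as the fold operation; no root is passed,
	// so vertex 0 (the first vertex visited) becomes the root of the tree.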
	HLD hld(g, A, 0, [](ll a, ll b) { return a + b; });
	
	while (Q--) {
		int q;
		scanf("%d", &q);
		if (q == 0) {
			int u, x;
			scanf("%d %d", &u, &x);
			hld.set(u, hld.get(u) + x);
		}
		else {
			int u;
			scanf("%d", &u);
			printf("%lld\n", hld.subtree_sum(u));
		}
	}
}
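
For orientation, the following is a minimal, unverified usage sketch of the same class (it is not part of the test above). It assumes GraphTheory/HeavyLightDecomposition_Query.hpp is available on the include path as in the bundle below; the graph and values are made-up illustrative data.

#include "GraphTheory/HeavyLightDecomposition_Query.hpp"
#include <cstdio>
#include <vector>

int main() {
	using ll = long long;
	using HLD = HeavyLightDecomposition_Query<ll>;
	// Path graph 0 - 1 - 2 - 3 given as an undirected adjacency list.
	HLD::Graph g(4);
	for (int i = 0; i + 1 < 4; ++i) {
		g[i].emplace_back(i + 1);
		g[i + 1].emplace_back(i);
	}
	std::vector<ll> a{1, 2, 3, 4}; // initial vertex values
	// Explicit root 0, identity 0, and + as the fold operation.
	HLD hld(g, 0, a, 0LL, [](ll x, ll y) { return x + y; });
	printf("%lld\n", hld.fold(1, 3));     // path 1-2-3: 2 + 3 + 4 = 9
	printf("%lld\n", hld.subtree_sum(2)); // subtree {2, 3}: 3 + 4 = 7
}
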
#line 1 "Test/HeavyLightDecomposition_Query.SubtreeQuery.test.cpp"
#define PROBLEM "https://judge.yosupo.jp/problem/vertex_add_subtree_sum"

#line 1 "GraphTheory/HeavyLightDecomposition_Query.hpp"



#line 1 "DataStructure/SegmentTree.hpp"



#include <vector>
#include <algorithm>
#include <cassert>
#include <functional>

/**
 * @brief https://tkmst201.github.io/Library/DataStructure/SegmentTree.hpp
 */
template<typename T>
struct SegmentTree {
	using value_type = T;
	using const_reference = const value_type &;
	using F = std::function<value_type (const_reference, const_reference)>;
	using size_type = std::size_t;
	
private:
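	// n: number of elements, n_: the smallest power of two with n_ >= n.
	// node is 1-indexed: node[1] is the root and element i is stored at node[i + n_].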
	size_type n, n_;
	value_type id_elem;
	F f;
	std::vector<value_type> node;
	
public:
	SegmentTree() = default;
	
	SegmentTree(size_type n, const_reference id_elem, const F & f)
		: n(n), id_elem(id_elem), f(f) {
		n_ = 1;
		while (n_ < n) n_ <<= 1;
		node.assign(2 * n_, id_elem);
	}
	
	SegmentTree(const std::vector<value_type> & v, const_reference id_elem, const F & f)
		: SegmentTree(v.size(), id_elem, f) {
		for (size_type i = 0; i < v.size(); ++i) node[i + n_] = v[i];
		for (size_type i = n_ - 1; i > 0; --i) node[i] = f(node[i << 1], node[i << 1 | 1]);
	}
	
	size_type size() const noexcept {
		return n;
	}
	
	void set(size_type i, const_reference x) noexcept {
		assert(i < size());
		node[i += n_] = x;
		while (i > 1) {
			i >>= 1;
			node[i] = f(node[i << 1], node[i << 1 | 1]);
		}
	}
	
	const_reference get(size_type i) const noexcept {
		assert(i < size());
		return node[i + n_];
	}
	
	value_type fold(size_type l, size_type r) const noexcept {
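		// Fold f over the half-open interval [l, r); lv accumulates from the left and rv from
		// the right, so the result is ordered correctly even for non-commutative f.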
		assert(l <= r);
		assert(r <= size());
		value_type lv = id_elem, rv = id_elem;
		for (l += n_, r += n_; l < r; l >>= 1, r >>= 1) {
			if (l & 1) lv = f(lv, node[l++]);
			if (r & 1) rv = f(node[r - 1], rv);
		}
		return f(lv, rv);
	}
	
	value_type fold_all() const noexcept {
		return node[1];
	}
	
	size_type max_right(size_type l, std::function<bool (const_reference)> g) const noexcept {
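		// Largest r such that g(fold(l, r)) holds; g(id_elem) must be true and g is expected to
		// be monotone as the interval grows. Runs in O(log n).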
		assert(l <= size());
		assert(g(id_elem));
		if (l == size()) return size();
		l += n_;
		value_type sum = id_elem;
		while (true) {
			while (~l & 1) l >>= 1;
			const value_type nex_sum = f(sum, node[l]);
			if (g(nex_sum)) { sum = nex_sum; ++l; }
			else break;
			if ((l & -l) == l) return size();
		}
		while (l < n_) {
			const value_type nex_sum = f(sum, node[l << 1]);
			l <<= 1;
			if (g(nex_sum)) { sum = nex_sum; l |= 1; }
		}
		return l - n_;
	}
	
	size_type min_left(size_type r, std::function<bool (const_reference)> g) const noexcept {
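		// Smallest l such that g(fold(l, r)) holds; the mirror of max_right, scanning leftwards from r.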
		assert(r <= size());
		assert(g(id_elem));
		if (r == 0) return 0;
		r += n_;
		value_type sum = id_elem;
		while (true) {
			--r;
			while (r > 1 && (r & 1)) r >>= 1;
			const value_type nex_sum = f(node[r], sum);
			if (g(nex_sum)) sum = nex_sum;
			else break;
			if ((r & -r) == r) return 0;
		}
		while (r < n_) {
			const value_type nex_sum = f(node[r << 1 | 1], sum);
			r <<= 1;
			if (!g(nex_sum)) r |= 1;
			else sum = nex_sum;
		}
		return r + 1 - n_;
	}
};


#line 1 "GraphTheory/HeavyLightDecomposition.hpp"



#line 6 "GraphTheory/HeavyLightDecomposition.hpp"
#include <stack>
#include <utility>
#line 9 "GraphTheory/HeavyLightDecomposition.hpp"

/**
 * @brief https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition.hpp
 */
struct HeavyLightDecomposition {
	using Graph = std::vector<std::vector<int>>;
	
private:
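	// par_: parent of each vertex (-1 for a root).  heavy_: id of the heavy path containing the vertex.
	// head_[k] / heavy_size_[k] / heavy_depth_[k]: top vertex, length, and depth (counted in paths) of heavy path k.
	// tree_id_ / roots_: bookkeeping for the forest.  in_ / out_: preorder interval [in_[v], out_[v]) of v's subtree.
	// par_dblng_: binary-lifting table over heavy-path heads, built only when LCA queries are requested.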
	int n;
	std::vector<int> par_;
	std::vector<int> heavy_, head_, heavy_size_, heavy_depth_;
	std::vector<int> tree_id_, roots_;
	std::vector<int> in_, out_;
	std::vector<std::vector<int>> par_dblng_;
	bool LCA;
	
public:
	HeavyLightDecomposition(const Graph & g, bool LCA = false)
		: HeavyLightDecomposition(g, -1, LCA) {}
	
	HeavyLightDecomposition(const Graph & g, int root, bool LCA)
		: n(g.size()), par_(n, -1), heavy_(n, -1), tree_id_(n, -1), in_(n, -1), out_(n, -1), LCA(LCA) {
		std::vector<int> sub_size(n, 0), next(n, -1);
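		// First pass: iterative DFS from each root that fills par_ and tree_id_, computes subtree
		// sizes, and records each vertex's heaviest child in next[].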
		for (int i = 0; i < n; ++i) {
			if (tree_id_[i] != -1) continue;
			if (root != -1 && i != root) continue;
			tree_id_[i] = roots_.size();
			std::stack<int> stk;
			stk.emplace(i);
			while (!stk.empty()) {
				const int u = stk.top();
				stk.pop();
				if (sub_size[u]) {
					int mx_size = 0;
					for (int v : g[u]) {
						if (v == par_[u]) continue;
						sub_size[u] += sub_size[v];
						if (mx_size < sub_size[v]) mx_size = sub_size[v], next[u] = v;
					}
					continue;
				}
				sub_size[u] = 1;
				stk.emplace(u);
				for (int v : g[u]) {
					assert(0 <= v && v < n);
					assert(v != u);
					if (v == par_[u]) continue;
					par_[v] = u;
					tree_id_[v] = roots_.size();
					stk.emplace(v);
				}
			}
			roots_.emplace_back(i);
		}
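		// Second pass: preorder traversal that assigns heavy-path ids, heads, sizes, depths, and the
		// in_/out_ indices. The heavy child is visited immediately after its parent, so every heavy
		// path and every subtree occupies a contiguous range of in_ values.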
		int euc = 0;
		for (int r : roots_) {
			heavy_[r] = head_.size();
			head_.emplace_back(r);
			heavy_size_.emplace_back(1);
			heavy_depth_.emplace_back(0);
			std::stack<std::pair<int, int>> stk;
			stk.emplace(r, 0);
			while (!stk.empty()) {
				const auto [u, i] = stk.top();
				stk.pop();
				if (i < static_cast<int>(g[u].size())) {
					stk.emplace(u, i + 1);
					const int v = g[u][i];
					if (v != par_[u] && v != next[u]) {
						heavy_[v] = head_.size();
						head_.emplace_back(v);
						heavy_size_.emplace_back(1);
						heavy_depth_.emplace_back(heavy_depth_[heavy_[u]] + 1);
						stk.emplace(v, 0);
					}
				}
				if (i == 0) {
					in_[u] = euc++;
					const int v = next[u];
					if (v != -1) {
						heavy_[v] = heavy_[u];
						++heavy_size_[heavy_[u]];
						stk.emplace(v, 0);
					}
				}
				if (i == static_cast<int>(g[u].size())) out_[u] = euc;
			}
		}
		if (!LCA) return;
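		// Binary-lifting table over heavy paths: par_dblng_[k][h] is the vertex reached from the head
		// of heavy path h by 2^k jumps, one jump being "parent of the current path's head" (roots map
		// to themselves).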
		int max_depth = *std::max_element(begin(heavy_depth_), end(heavy_depth_));
		int lglg_n = 0;
		while ((1 << lglg_n) <= max_depth) ++lglg_n;
		par_dblng_.assign(lglg_n + 1, std::vector<int>(heavy_size(), -1));
		for (int i = 0; i < heavy_size(); ++i) par_dblng_[0][i] = par_[head_[i]] == -1 ? head_[i] : par_[head_[i]];
		for (int i = 0; i < lglg_n; ++i) {
			for (int j = 0; j < heavy_size(); ++j) {
				par_dblng_[i + 1][j] = par_dblng_[i][heavy_[par_dblng_[i][j]]];
			}
		}
	}
	
	int size() const noexcept {
		return n;
	}
	
	int par(int v) const noexcept {
		assert(0 <= v && v < size());
		return par_[v];
	}
	
	int tree_id(int v) const noexcept {
		assert(0 <= v && v < size());
		return tree_id_[v];
	}
	
	int tree_cnt() const noexcept {
		return roots_.size();
	}
	
	const std::vector<int> & trees() const noexcept {
		return roots_;
	}
	
	int heavy(int v) const noexcept {
		assert(0 <= v && v < size());
		return heavy_[v];
	}
	
	int head(int k) const noexcept {
		assert(0 <= k && k < heavy_size());
		return head_[k];
	}
	
	int heavy_size() const noexcept {
		return head_.size();
	}
	
	int heavy_size(int k) const noexcept {
		assert(0 <= k && k < heavy_size());
		return heavy_size_[k];
	}
	
	int heavy_depth(int k) const noexcept {
		assert(0 <= k && k < heavy_size());
		return heavy_depth_[k];
	}
	
	int in(int v) const noexcept {
		assert(0 <= v && v < size());
		return in_[v];
	}
	
	int out(int v) const noexcept {
		assert(0 <= v && v < size());
		return out_[v];
	}
	
	const std::vector<std::vector<int>> & par_dblng() const noexcept {
		assert(LCA);
		return par_dblng_;
	}
	
	std::pair<int, int> lca_heavy(int x, int y) const noexcept {
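		// Lift x and y heavy path by heavy path until they lie on the same path; the pair is returned
		// in the original argument order, and the endpoint with the smaller in_ index is the LCA.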
		assert(LCA);
		assert(0 <= x && x < size());
		assert(0 <= y && y < size());
		assert(tree_id_[x] == tree_id_[y]);
		if (heavy_[x] == heavy_[y]) return {x, y};
		const bool isswap = heavy_depth_[heavy_[x]] < heavy_depth_[heavy_[y]];
		if (isswap) std::swap(x, y);
		const int diff = heavy_depth_[heavy_[x]] - heavy_depth_[heavy_[y]];
		for (int i = 0; i < static_cast<int>(par_dblng_.size()); ++i) if (diff >> i & 1) x = par_dblng_[i][heavy_[x]];
		if (heavy_[x] == heavy_[y]) return isswap ? std::make_pair(y, x) : std::make_pair(x, y);
		for (int i = par_dblng_.size() - 1; i >= 0; --i) {
			const int p1 = par_dblng_[i][heavy_[x]], p2 = par_dblng_[i][heavy_[y]];
			if (heavy_[p1] != heavy_[p2]) x = p1, y = p2;
		}
		x = par_dblng_[0][heavy_[x]];
		y = par_dblng_[0][heavy_[y]];
		return isswap ? std::make_pair(y, x) : std::make_pair(x, y);
	}
	
	int lca(int x, int y) {
		assert(LCA);
		assert(0 <= x && x < size());
		assert(0 <= y && y < size());
		assert(tree_id_[x] == tree_id_[y]);
		const auto [a, b] = lca_heavy(x, y);
		return in_[a] < in_[b] ? a : b;
	}
};


#line 6 "GraphTheory/HeavyLightDecomposition_Query.hpp"

#line 11 "GraphTheory/HeavyLightDecomposition_Query.hpp"

/**
 * @brief https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition_Query.hpp
 */
template<typename T>
struct HeavyLightDecomposition_Query {
	using value_type = T;
	using const_reference = const value_type &;
	using seg_type = SegmentTree<value_type>;
	using hld_type = HeavyLightDecomposition;
	using size_type = std::size_t;
	using Graph = typename hld_type::Graph;
	using F = std::function<value_type (const_reference, const_reference)>;
	
private:
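	// VERTEX: true for vertex values, false for edge values (an edge is stored at its deeper endpoint).
	// seg holds values in preorder (in_) order; rseg holds the same values with every heavy path
	// reversed, so folds directed towards the root also work for non-commutative f.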
	bool VERTEX;
	value_type id_elem;
	F f;
	hld_type hld;
	seg_type seg, rseg;
	
public:
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, VERTEX, -1, id_elem, f) {}
	
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, int root, const value_type & id_elem, const F & f)
		: VERTEX(VERTEX), id_elem(id_elem), f(f), hld(g, root, false) {
		seg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
		rseg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
	}
	
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, -1, v, id_elem, f) {}
	
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, int root, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: VERTEX(true), id_elem(id_elem), f(f), hld(g, root, false) {
		assert(hld.size() == v.size());
		std::vector<value_type> init(hld.size());
		for (int i = 0; i < hld.size(); ++i) init[hld.in(i)] = v[i];
		seg = seg_type{init, id_elem, f};
		for (int i = 0; i < hld.heavy_size(); ++i) {
			const int offset = hld.in(hld.head(i));
			std::reverse(begin(init) + offset, begin(init) + offset + hld.heavy_size(i));
		}
		rseg = seg_type{init, id_elem, f};
	}
	
	const hld_type & get_hld() const noexcept {
		return hld;
	}
	
	void set(int v, const_reference x) noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		set_(v, x);
	}
	
	value_type get(int v) const noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		return get_(v);
	}
	
	void set(int u, int v, const_reference x) noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		set_(hld.par(u) == v ? u : v, x);
	}
	
	value_type get(int u, int v) const noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		return get_(hld.par(u) == v ? u : v);
	}
	
private:
	void set_(int v, const_reference x) noexcept {
		seg.set(hld.in(v), x);
		rseg.set(reverse_idx(v), x);
	}
	
	value_type get_(int v) const noexcept {
		return seg.get(hld.in(v));
	}
	
	int reverse_idx(int v) const noexcept {
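		// Position of v inside rseg: indices within v's heavy path are mirrored across the
		// path's interval [in(head), in(head) + heavy_size).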
		const int id = hld.heavy(v);
		return (hld.in(hld.head(id)) << 1) + hld.heavy_size(id) - hld.in(v) - 1;
	}
	
public:
	value_type fold(int u, int v) const noexcept {
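		// Path fold from u to v. Repeatedly fold the prefix of whichever endpoint lies on the deeper
		// heavy path (rseg for the u side, seg for the v side), jump to the parent of that path's
		// head, and finish with one fold on the shared path. O(log N) heavy paths are crossed, each
		// costing one O(log N) segment-tree fold.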
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.tree_id(u) == hld.tree_id(v));
		value_type lv = id_elem, rv = id_elem;
		while (hld.heavy(u) != hld.heavy(v)) {
			if (hld.heavy_depth(hld.heavy(u)) >= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(u));
				lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(head) + 1));
				u = hld.par(head);
			}
			if (hld.heavy(u) == hld.heavy(v)) break;
			if (hld.heavy_depth(hld.heavy(u)) <= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(v));
				rv = f(seg.fold(hld.in(head), hld.in(v) + 1), rv);
				v = hld.par(head);
			}
		}
		if (hld.in(u) < hld.in(v)) rv = f(seg.fold(hld.in(u) + !VERTEX, hld.in(v) + 1), rv);
		else lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(v) + VERTEX));
		return f(lv, rv);
	}
	
	value_type subtree_sum(int v) const noexcept {
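		// A subtree is the contiguous preorder interval [in(v), out(v)), so a single fold on seg
		// suffices; the +!VERTEX skips the edge above v when edge values are stored.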
		assert(0 <= v && v < hld.size());
		return seg.fold(hld.in(v) + !VERTEX, hld.out(v));
	}
};


#line 4 "Test/HeavyLightDecomposition_Query.SubtreeQuery.test.cpp"

#include <cstdio>
#line 7 "Test/HeavyLightDecomposition_Query.SubtreeQuery.test.cpp"

int main() {
	int N, Q;
	scanf("%d %d", &N, &Q);
	
	std::vector<int> A(N);
	for (int i = 0; i < N; ++i) scanf("%d", &A[i]);
	
	using ll = long long;
	using HLD = HeavyLightDecomposition_Query<ll>;
	HLD::Graph g(N);
	for (int i = 1; i < N; ++i) {
		int p;
		scanf("%d", &p);
		g[p].emplace_back(i);
	}
	HLD hld(g, A, 0, [](ll a, ll b) { return a + b; });
	
	while (Q--) {
		int q;
		scanf("%d", &q);
		if (q == 0) {
			int u, x;
			scanf("%d %d", &u, &x);
			hld.set(u, hld.get(u) + x);
		}
		else {
			int u;
			scanf("%d", &u);
			printf("%lld\n", hld.subtree_sum(u));
		}
	}
}