This documentation is automatically generated by online-judge-tools/verification-helper
#include "GraphTheory/HeavyLightDecomposition_Query.hpp"
頂点数 $N$ の木に対し、重軽分解 ( HL 分解) と セグメント木 を用いて、$1$ 点更新 $\Theta(\log{N})$ 、パスクエリを $\mathcal{O}((\log{N})^2)$ 、部分木クエリを $\Theta(\log{N})$ で処理します。
1 点更新パス Fold でも同様な処理ができます。$\log$ が 1 つ少ないのでより高速な(はず)です。
詳しくは使用例をご覧ください。
HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, const T & id_elem, const F & f)
g
で初期化HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, int root, const T & id_elem, const F & f)
root
の根付き木 g
で初期化HeavyLightDecomposition_Query(const Graph & g, std::vector<U> & v, const T & id_elem, const F & f)
g
と初期値 v
で初期化HeavyLightDecomposition_Query(const Graph & g, int root, std::vector<U> & v, const T & id_elem, const F & f)
root
の根付き木 g
と初期値 v
で初期化const HLD & get_hld()
void set(int v, const T & x)
T get(int v)
void set(int u, int v, const T & x)
T get(int u, int v)
T fold(int u, int v)
T subtree_sum(int v)
以下、グラフの頂点数を $N$ とします。
F
は二項演算 std::function<T (const T &, const T &)>
の略記です。
制約
$(T, f, \mathrm{id\_elem})$ はモノイド。
VERTEX $= true$ のとき頂点に、VERTEX $= false$ のとき辺に値を持ちます。
グラフ g
で初期化します。
はじめ、すべての値は単位元です。
制約
g
は木計算量
根 root
の根付き木 g
で初期化します。
はじめ、すべての値は単位元です。
制約
g
は root
を根とした根付き木または木計算量
グラフ g
と初期値 v
で初期化します。
はじめ、頂点 $i$ の値は $v[i]$ です。
制約
g
は木v
$| = N$計算量
根 root
の根付き木 g
と初期値 v
で初期化します。
はじめ、頂点 $i$ の値は $v[i]$ です。
制約
g
は root
を根とした根付き木または木v
$| = N$計算量
内部で保持している HLD を返します。
計算量
頂点 $v$ に値 $x$ をセットします。
制約
VERTEX
$= true$計算量
頂点 $v$ の値を返します。
制約
VERTEX
$= true$計算量
辺 $u-v$ に値 $x$ をセットします。
制約
VERTEX
$= false$計算量
辺 $u-v$ の値を返します。
制約
VERTEX
$= false$計算量
頂点 $u$ から頂点 $v$ へのパス上の頂点または辺の値を順に並べたものを $a_1, a_2, \ldots, a_k$ として、$f(a_1, f(a_2, f(\ldots, f(a_{k-1}, a_k)\ldots)))$ を返します。
制約
計算量
頂点 $v$ の部分木に含まれる頂点または辺の値をそれぞれ $a_1, a_2, \ldots, a_k$ として、$f(a_1, f(a_2, \ldots, f(a_{k-1}, a_k)\ldots))$ を返します。
制約
$(T, f, \mathrm{id\_elem})$ は可換モノイド
計算量
頂点に値を持つ例です。
#include <bits/stdc++.h>
#include "GraphTheory/HeavyLightDecomposition_Query.hpp"
using namespace std;
// Usage example: monoid values stored on vertices.
int main() {
	using HLD = HeavyLightDecomposition_Query<int>;
	// Tree shape (vertex value in parentheses):
	//          1(10)
	//    2(100)     3(1000)
	// 0(1)  4(10000)   5(100000)
	HLD::Graph g(6);
	g[1].push_back(2);
	g[1].push_back(3);
	g[2].push_back(0);
	g[2].push_back(4);
	g[3].push_back(5);
	g[5].push_back(3); // a reverse edge may also be present
	vector<int> A{1, 10, 100, 1000, 10000, 100000};
	const auto add = [](auto x, auto y) { return x + y; };
	HLD hld(g, 1, A, 0, add);
	// sum of the vertices [1, 3] on the 1 -> 3 path
	cout << "fold(1, 3) = " << hld.fold(1, 3) << endl; // 1010
	// sum of the vertices [4, 2, 1, 3, 5] on the 4 -> 5 path
	cout << "fold(4, 5) = " << hld.fold(4, 5) << endl; // 111110
	// sum of the vertex [3] on the 3 -> 3 path
	cout << "fold(3, 3) = " << hld.fold(3, 3) << endl; // 1000
	// sum of the vertices [0, 1, 2, 3, 4, 5] in the subtree of vertex 1
	cout << "subtree_sum(1) = " << hld.subtree_sum(1) << endl; // 111111
	// sum of the vertices [0, 2, 4] in the subtree of vertex 2
	cout << "subtree_sum(2) = " << hld.subtree_sum(2) << endl; // 10101
	// sum of the vertex [5] in the subtree of vertex 5
	cout << "subtree_sum(5) = " << hld.subtree_sum(5) << endl; // 100000
}
辺に値を持つ例です。
#include <bits/stdc++.h>
#include "GraphTheory/HeavyLightDecomposition_Query.hpp"
using namespace std;
// Usage example: monoid values stored on edges.
int main() {
	using HLD = HeavyLightDecomposition_Query<int>;
	// Edge values are written at the child (leaf-side) endpoint:
	//          1
	//    2(100)     3(1000)
	// 0(1)  4(10000)   5(100000)
	HLD::Graph g(6);
	g[1].push_back(2);
	g[1].push_back(3);
	g[2].push_back(0);
	g[2].push_back(4);
	g[3].push_back(5);
	g[5].push_back(3); // a reverse edge may also be present
	const auto add = [](auto x, auto y) { return x + y; };
	HLD hld(g, false, 1, 0, add);
	hld.set(1, 2, 100);
	hld.set(1, 3, 1000);
	hld.set(2, 0, 1);
	hld.set(2, 4, 10000);
	hld.set(3, 5, 100000);
	// sum of the edge [1-3] on the 1 -> 3 path
	cout << "fold(1, 3) = " << hld.fold(1, 3) << endl; // 1000
	// sum of the edges [4-2, 2-1, 1-3, 3-5] on the 4 -> 5 path
	cout << "fold(4, 5) = " << hld.fold(4, 5) << endl; // 111100
	// the 3 -> 3 path contains no edge
	cout << "fold(3, 3) = " << hld.fold(3, 3) << endl; // 0
	// sum of the edges [1-2, 2-0, 2-4, 1-3, 3-5] in the subtree of vertex 1
	cout << "subtree_sum(1) = " << hld.subtree_sum(1) << endl; // 111101
	// sum of the edges [2-0, 2-4] in the subtree of vertex 2
	cout << "subtree_sum(2) = " << hld.subtree_sum(2) << endl; // 10001
	// the subtree of vertex 5 contains no edge
	cout << "subtree_sum(5) = " << hld.subtree_sum(5) << endl; // 0
}
2020/04/19: https://qiita.com/ageprocpp/items/8dfe768218da83314989
2020/04/19: https://math314.hateblo.jp/entry/2014/06/24/220107
2020/09/18: https://codeforces.com/blog/entry/53170
#ifndef INCLUDE_GUARD_HEAVY_LIGHT_DECOMPOSITION_PATH_QUERY_HPP
#define INCLUDE_GUARD_HEAVY_LIGHT_DECOMPOSITION_PATH_QUERY_HPP
#include "DataStructure/SegmentTree.hpp"
#include "GraphTheory/HeavyLightDecomposition.hpp"
#include <vector>
#include <cassert>
#include <functional>
#include <algorithm>
/**
* @brief https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition_Query.hpp
*/
/**
 * @brief Point-update / path-fold / subtree-fold queries on a tree, built from
 *        a HeavyLightDecomposition and two segment trees over the monoid
 *        (T, f, id_elem).  `seg` stores values in Euler (in_) order; `rseg`
 *        stores them with every heavy chain reversed so upward path segments
 *        can be folded in path order even when f is non-commutative.
 *        https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition_Query.hpp
 */
template<typename T>
struct HeavyLightDecomposition_Query {
	using value_type = T;
	using const_reference = const value_type &;
	using seg_type = SegmentTree<value_type>;
	using hld_type = HeavyLightDecomposition;
	using size_type = std::size_t;
	using Graph = typename hld_type::Graph;
	using F = std::function<value_type (const_reference, const_reference)>;
private:
	bool VERTEX;         // true: values live on vertices; false: on edges (an edge is stored at its child vertex)
	value_type id_elem;  // identity element of the monoid
	F f;                 // monoid operation
	hld_type hld;        // the decomposition (LCA table not needed for these queries)
	seg_type seg;        // values in Euler order
	seg_type rseg;       // values with each heavy chain reversed (for upward folds)
public:
	// Build over graph g with every value initialized to id_elem.
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, VERTEX, -1, id_elem, f) {}
	// Build over the tree g rooted at root (-1: root every component).
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, int root, const value_type & id_elem, const F & f)
		: VERTEX(VERTEX), id_elem(id_elem), f(f), hld(g, root, false) {
		seg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
		rseg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
	}
	// Vertex-valued tree with initial values v (v[i] is the value of vertex i).
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, -1, v, id_elem, f) {}
	// Rooted, vertex-valued tree with initial values v.
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, int root, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: VERTEX(true), id_elem(id_elem), f(f), hld(g, root, false) {
		assert(hld.size() == static_cast<int>(v.size()));
		std::vector<value_type> init(hld.size());
		for (int i = 0; i < hld.size(); ++i) init[hld.in(i)] = v[i];
		seg = seg_type{init, id_elem, f};
		// Reverse every heavy chain in place to obtain the layout used by rseg.
		for (int i = 0; i < hld.heavy_size(); ++i) {
			const int offset = hld.in(hld.head(i));
			std::reverse(begin(init) + offset, begin(init) + offset + hld.heavy_size(i));
		}
		rseg = seg_type{init, id_elem, f};
	}
	// Access to the underlying decomposition.
	const hld_type & get_hld() const noexcept {
		return hld;  // BUG FIX: used to `return get_hld;` (the member function itself), which is ill-formed
	}
	// Vertex mode: assign x to vertex v.  Theta(log N).
	void set(int v, const_reference x) noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		set_(v, x);
	}
	// Vertex mode: read the value of vertex v.  O(1) (segment-tree leaf read).
	value_type get(int v) const noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		return get_(v);
	}
	// Edge mode: assign x to the edge u-v (u and v must be adjacent).
	void set(int u, int v, const_reference x) noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		set_(hld.par(u) == v ? u : v, x);  // the edge lives at its child endpoint
	}
	// Edge mode: read the value of the edge u-v.
	value_type get(int u, int v) const noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		return get_(hld.par(u) == v ? u : v);
	}
private:
	// Write x at vertex v in both layouts.
	void set_(int v, const_reference x) noexcept {
		seg.set(hld.in(v), x);
		rseg.set(reverse_idx(v), x);
	}
	value_type get_(int v) const noexcept {
		return seg.get(hld.in(v));
	}
	// Position of vertex v inside rseg: its heavy chain occupies the same
	// interval as in seg, but stored back-to-front.
	int reverse_idx(int v) const noexcept {
		const int id = hld.heavy(v);
		return (hld.in(hld.head(id)) << 1) + hld.heavy_size(id) - hld.in(v) - 1;
	}
public:
	// Fold f over the values on the u -> v path, in path order.  O((log N)^2).
	value_type fold(int u, int v) const noexcept {
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.tree_id(u) == hld.tree_id(v));
		value_type lv = id_elem, rv = id_elem;  // folds of the u-side (upward) and v-side (downward) fragments
		while (hld.heavy(u) != hld.heavy(v)) {
			// Always advance the endpoint whose chain lies deeper (more light edges).
			if (hld.heavy_depth(hld.heavy(u)) >= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(u));
				lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(head) + 1));  // u..head, in path order
				u = hld.par(head);
			}
			if (hld.heavy(u) == hld.heavy(v)) break;
			if (hld.heavy_depth(hld.heavy(u)) <= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(v));
				rv = f(seg.fold(hld.in(head), hld.in(v) + 1), rv);  // head..v, in path order
				v = hld.par(head);
			}
		}
		// u and v now share a chain; in edge mode the topmost vertex itself is
		// excluded (the +!VERTEX / +VERTEX offsets drop its slot).
		if (hld.in(u) < hld.in(v)) rv = f(seg.fold(hld.in(u) + !VERTEX, hld.in(v) + 1), rv);
		else lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(v) + VERTEX));
		return f(lv, rv);
	}
	// Fold f over all values in the subtree of v (f must be commutative).  Theta(log N).
	value_type subtree_sum(int v) const noexcept {
		assert(0 <= v && v < hld.size());
		// A subtree is the contiguous Euler interval [in(v), out(v)); drop v itself in edge mode.
		return seg.fold(hld.in(v) + !VERTEX, hld.out(v));
	}
};
#endif // INCLUDE_GUARD_HEAVY_LIGHT_DECOMPOSITION_PATH_QUERY_HPP
#line 1 "GraphTheory/HeavyLightDecomposition_Query.hpp"
#line 1 "DataStructure/SegmentTree.hpp"
#include <vector>
#include <algorithm>
#include <cassert>
#include <functional>
/**
* @brief https://tkmst201.github.io/Library/DataStructure/SegmentTree.hpp
*/
/**
 * @brief Point-update / range-fold segment tree over a monoid (T, f, id_elem).
 *        https://tkmst201.github.io/Library/DataStructure/SegmentTree.hpp
 */
template<typename T>
struct SegmentTree {
	using value_type = T;
	using const_reference = const value_type &;
	using F = std::function<value_type (const_reference, const_reference)>;
	using size_type = std::size_t;
private:
	size_type n, cap;              // n: logical size, cap: leaf count (power of two, >= n)
	value_type id_elem;            // identity element of the monoid
	F f;                           // monoid operation
	std::vector<value_type> tree;  // 1-indexed heap layout; leaves occupy [cap, 2*cap)
public:
	SegmentTree() = default;
	// Build a tree holding n copies of the identity.
	SegmentTree(size_type n, const_reference id_elem, const F & f)
		: n(n), id_elem(id_elem), f(f) {
		for (cap = 1; cap < n; cap <<= 1) {}
		tree.assign(cap << 1, id_elem);
	}
	// Build a tree initialized with the contents of v.
	SegmentTree(const std::vector<value_type> & v, const_reference id_elem, const F & f)
		: SegmentTree(v.size(), id_elem, f) {
		std::copy(v.begin(), v.end(), tree.begin() + cap);
		for (size_type i = cap; i-- > 1; ) tree[i] = f(tree[i << 1], tree[i << 1 | 1]);
	}
	// Number of stored elements.
	size_type size() const noexcept {
		return n;
	}
	// Assign x to position i, then refresh all ancestors.
	void set(size_type i, const_reference x) noexcept {
		assert(i < size());
		size_type pos = i + cap;
		tree[pos] = x;
		for (pos >>= 1; pos > 0; pos >>= 1) tree[pos] = f(tree[pos << 1], tree[pos << 1 | 1]);
	}
	// Read the value at position i.
	const_reference get(size_type i) const noexcept {
		assert(i < size());
		return tree[i + cap];
	}
	// Fold f over the half-open interval [l, r).
	value_type fold(size_type l, size_type r) const noexcept {
		assert(l <= r);
		assert(r <= size());
		value_type from_left = id_elem, from_right = id_elem;
		for (l += cap, r += cap; l < r; l >>= 1, r >>= 1) {
			if (l & 1) from_left = f(from_left, tree[l++]);
			if (r & 1) from_right = f(tree[r - 1], from_right);
		}
		return f(from_left, from_right);
	}
	// Fold over the whole array.
	value_type fold_all() const noexcept {
		return tree[1];
	}
	// Largest r with g(fold(l, r)) true (g monotone, g(id_elem) must hold).
	size_type max_right(size_type l, std::function<bool (const_reference)> g) const noexcept {
		assert(l <= size());
		assert(g(id_elem));
		if (l == size()) return size();
		size_type pos = l + cap;
		value_type acc = id_elem;
		while (true) {
			while (!(pos & 1)) pos >>= 1;  // climb while pos is a left child
			const value_type cand = f(acc, tree[pos]);
			if (!g(cand)) break;
			acc = cand;
			++pos;
			if ((pos & -pos) == pos) return size();  // walked off the right end
		}
		// descend toward the first position where g fails
		while (pos < cap) {
			const value_type cand = f(acc, tree[pos << 1]);
			pos <<= 1;
			if (g(cand)) { acc = cand; pos |= 1; }
		}
		return pos - cap;
	}
	// Smallest l with g(fold(l, r)) true (g monotone, g(id_elem) must hold).
	size_type min_left(size_type r, std::function<bool (const_reference)> g) const noexcept {
		assert(r <= size());
		assert(g(id_elem));
		if (r == 0) return 0;
		size_type pos = r + cap;
		value_type acc = id_elem;
		while (true) {
			--pos;
			while (pos > 1 && (pos & 1)) pos >>= 1;  // climb while pos is a right child
			const value_type cand = f(tree[pos], acc);
			if (!g(cand)) break;
			acc = cand;
			if ((pos & -pos) == pos) return 0;  // the entire prefix satisfies g
		}
		while (pos < cap) {
			const value_type cand = f(tree[pos << 1 | 1], acc);
			pos <<= 1;
			if (g(cand)) acc = cand;
			else pos |= 1;
		}
		return pos + 1 - cap;
	}
};
#line 1 "GraphTheory/HeavyLightDecomposition.hpp"
#line 6 "GraphTheory/HeavyLightDecomposition.hpp"
#include <stack>
#include <utility>
#line 9 "GraphTheory/HeavyLightDecomposition.hpp"
/**
* @brief https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition.hpp
*/
// Heavy-light decomposition of a forest, built iteratively (no recursion).
// Each tree is split into vertex-disjoint heavy chains; vertices are numbered
// by a DFS order (in_/out_) in which every heavy chain and every subtree is a
// contiguous interval.  Optionally builds a binary-lifting table over chains
// for LCA queries.
struct HeavyLightDecomposition {
using Graph = std::vector<std::vector<int>>;
private:
// number of vertices
int n;
// par_[v]: parent of v (-1 for roots)
std::vector<int> par_;
// heavy_[v]: id of the heavy chain containing v
// head_[k]: topmost vertex of chain k
// heavy_size_[k]: number of vertices on chain k
// heavy_depth_[k]: number of light edges between the root and chain k
std::vector<int> heavy_, head_, heavy_size_, heavy_depth_;
// tree_id_[v]: index of the component containing v; roots_: root of each component
std::vector<int> tree_id_, roots_;
// in_[v]/out_[v]: half-open DFS interval of v's subtree
std::vector<int> in_, out_;
// par_dblng_[i][k]: chain reached from chain k by 2^i "parent of head" jumps
std::vector<std::vector<int>> par_dblng_;
// whether the LCA table was built
bool LCA;
public:
HeavyLightDecomposition(const Graph & g, bool LCA = false)
: HeavyLightDecomposition(g, -1, LCA) {}
// g: undirected adjacency list (extra reverse edges are tolerated),
// root: forced root (-1 = root every component at its smallest vertex index),
// LCA: also build the table used by lca()/lca_heavy().
HeavyLightDecomposition(const Graph & g, int root, bool LCA)
: n(g.size()), par_(n, -1), heavy_(n, -1), tree_id_(n, -1), in_(n, -1), out_(n, -1), LCA(LCA) {
// Pass 1: iterative post-order DFS computing par_, subtree sizes, and for
// each vertex its child with the largest subtree (next[u] = heavy child).
std::vector<int> sub_size(n, 0), next(n, -1);
for (int i = 0; i < n; ++i) {
if (tree_id_[i] != -1) continue;
if (root != -1 && i != root) continue;
tree_id_[i] = roots_.size();
std::stack<int> stk;
stk.emplace(i);
while (!stk.empty()) {
const int u = stk.top();
stk.pop();
// second visit: children are finished, accumulate sizes, pick heavy child
if (sub_size[u]) {
int mx_size = 0;
for (int v : g[u]) {
if (v == par_[u]) continue;
sub_size[u] += sub_size[v];
if (mx_size < sub_size[v]) mx_size = sub_size[v], next[u] = v;
}
continue;
}
// first visit: re-push u for the post-order step, then push its children
sub_size[u] = 1;
stk.emplace(u);
for (int v : g[u]) {
assert(0 <= v && v < n);
assert(v != u);
if (v == par_[u]) continue;
par_[v] = u;
tree_id_[v] = roots_.size();
stk.emplace(v);
}
}
roots_.emplace_back(i);
}
// Pass 2: DFS assigning chain ids and the Euler numbering in_/out_.
// The heavy child continues its parent's chain; every light child opens a
// new chain one light-level deeper.
int euc = 0;
for (int r : roots_) {
heavy_[r] = head_.size();
head_.emplace_back(r);
heavy_size_.emplace_back(1);
heavy_depth_.emplace_back(0);
std::stack<std::pair<int, int>> stk;
stk.emplace(r, 0);
while (!stk.empty()) {
const auto [u, i] = stk.top();
stk.pop();
// resume scanning u's adjacency list from position i
if (i < static_cast<int>(g[u].size())) {
stk.emplace(u, i + 1);
const int v = g[u][i];
// light child: start a new chain
if (v != par_[u] && v != next[u]) {
heavy_[v] = head_.size();
head_.emplace_back(v);
heavy_size_.emplace_back(1);
heavy_depth_.emplace_back(heavy_depth_[heavy_[u]] + 1);
stk.emplace(v, 0);
}
}
// first visit: number u and extend its chain with the heavy child
if (i == 0) {
in_[u] = euc++;
const int v = next[u];
if (v != -1) {
heavy_[v] = heavy_[u];
++heavy_size_[heavy_[u]];
stk.emplace(v, 0);
}
}
// all neighbours processed: close u's subtree interval
if (i == static_cast<int>(g[u].size())) out_[u] = euc;
}
}
if (!LCA) return;
// Binary-lifting table over chains: level 0 jumps to the parent of the
// chain head (itself for roots); level i+1 composes two level-i jumps.
int max_depth = *std::max_element(begin(heavy_depth_), end(heavy_depth_));
int lglg_n = 0;
while ((1 << lglg_n) <= max_depth) ++lglg_n;
par_dblng_.assign(lglg_n + 1, std::vector<int>(heavy_size(), -1));
for (int i = 0; i < heavy_size(); ++i) par_dblng_[0][i] = par_[head_[i]] == -1 ? head_[i] : par_[head_[i]];
for (int i = 0; i < lglg_n; ++i) {
for (int j = 0; j < heavy_size(); ++j) {
par_dblng_[i + 1][j] = par_dblng_[i][heavy_[par_dblng_[i][j]]];
}
}
}
// number of vertices
int size() const noexcept {
return n;
}
// parent of v (-1 if v is a root)
int par(int v) const noexcept {
assert(0 <= v && v < size());
return par_[v];
}
// component index of v
int tree_id(int v) const noexcept {
assert(0 <= v && v < size());
return tree_id_[v];
}
// number of connected components
int tree_cnt() const noexcept {
return roots_.size();
}
// root vertex of every component
const std::vector<int> & trees() const noexcept {
return roots_;
}
// heavy-chain id of v
int heavy(int v) const noexcept {
assert(0 <= v && v < size());
return heavy_[v];
}
// topmost vertex of chain k
int head(int k) const noexcept {
assert(0 <= k && k < heavy_size());
return head_[k];
}
// number of heavy chains
int heavy_size() const noexcept {
return head_.size();
}
// number of vertices on chain k
int heavy_size(int k) const noexcept {
assert(0 <= k && k < heavy_size());
return heavy_size_[k];
}
// number of light edges between the root and chain k
int heavy_depth(int k) const noexcept {
assert(0 <= k && k < heavy_size());
return heavy_depth_[k];
}
// DFS entry time of v (chain/subtree intervals are contiguous in this order)
int in(int v) const noexcept {
assert(0 <= v && v < size());
return in_[v];
}
// DFS exit time: v's subtree occupies [in(v), out(v))
int out(int v) const noexcept {
assert(0 <= v && v < size());
return out_[v];
}
// raw binary-lifting table (requires LCA == true)
const std::vector<std::vector<int>> & par_dblng() const noexcept {
assert(LCA);
return par_dblng_;
}
// Lift x and y onto a common heavy chain and return the lifted pair (in the
// original argument order); the LCA is the one with the smaller in_ (see lca()).
std::pair<int, int> lca_heavy(int x, int y) const noexcept {
assert(LCA);
assert(0 <= x && x < size());
assert(0 <= y && y < size());
assert(tree_id_[x] == tree_id_[y]);
if (heavy_[x] == heavy_[y]) return {x, y};
// ensure x's chain is at least as deep as y's
const bool isswap = heavy_depth_[heavy_[x]] < heavy_depth_[heavy_[y]];
if (isswap) std::swap(x, y);
// raise x by the chain-depth difference
const int diff = heavy_depth_[heavy_[x]] - heavy_depth_[heavy_[y]];
for (int i = 0; i < static_cast<int>(par_dblng_.size()); ++i) if (diff >> i & 1) x = par_dblng_[i][heavy_[x]];
if (heavy_[x] == heavy_[y]) return isswap ? std::make_pair(y, x) : std::make_pair(x, y);
// binary-lift both just below the first common chain, then step once more
for (int i = par_dblng_.size() - 1; i >= 0; --i) {
const int p1 = par_dblng_[i][heavy_[x]], p2 = par_dblng_[i][heavy_[y]];
if (heavy_[p1] != heavy_[p2]) x = p1, y = p2;
}
x = par_dblng_[0][heavy_[x]];
y = par_dblng_[0][heavy_[y]];
return isswap ? std::make_pair(y, x) : std::make_pair(x, y);
}
// lowest common ancestor of x and y (requires LCA == true)
int lca(int x, int y) {
assert(LCA);
assert(0 <= x && x < size());
assert(0 <= y && y < size());
assert(tree_id_[x] == tree_id_[y]);
const auto [a, b] = lca_heavy(x, y);
return in_[a] < in_[b] ? a : b;
}
};
#line 6 "GraphTheory/HeavyLightDecomposition_Query.hpp"
#line 11 "GraphTheory/HeavyLightDecomposition_Query.hpp"
/**
* @brief https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition_Query.hpp
*/
/**
 * @brief Point-update / path-fold / subtree-fold queries on a tree, built from
 *        a HeavyLightDecomposition and two segment trees over the monoid
 *        (T, f, id_elem).  `seg` stores values in Euler (in_) order; `rseg`
 *        stores them with every heavy chain reversed so upward path segments
 *        can be folded in path order even when f is non-commutative.
 *        https://tkmst201.github.io/Library/GraphTheory/HeavyLightDecomposition_Query.hpp
 */
template<typename T>
struct HeavyLightDecomposition_Query {
	using value_type = T;
	using const_reference = const value_type &;
	using seg_type = SegmentTree<value_type>;
	using hld_type = HeavyLightDecomposition;
	using size_type = std::size_t;
	using Graph = typename hld_type::Graph;
	using F = std::function<value_type (const_reference, const_reference)>;
private:
	bool VERTEX;         // true: values live on vertices; false: on edges (an edge is stored at its child vertex)
	value_type id_elem;  // identity element of the monoid
	F f;                 // monoid operation
	hld_type hld;        // the decomposition (LCA table not needed for these queries)
	seg_type seg;        // values in Euler order
	seg_type rseg;       // values with each heavy chain reversed (for upward folds)
public:
	// Build over graph g with every value initialized to id_elem.
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, VERTEX, -1, id_elem, f) {}
	// Build over the tree g rooted at root (-1: root every component).
	HeavyLightDecomposition_Query(const Graph & g, bool VERTEX, int root, const value_type & id_elem, const F & f)
		: VERTEX(VERTEX), id_elem(id_elem), f(f), hld(g, root, false) {
		seg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
		rseg = seg_type{static_cast<size_type>(hld.size()), id_elem, f};
	}
	// Vertex-valued tree with initial values v (v[i] is the value of vertex i).
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: HeavyLightDecomposition_Query(g, -1, v, id_elem, f) {}
	// Rooted, vertex-valued tree with initial values v.
	template<typename U>
	HeavyLightDecomposition_Query(const Graph & g, int root, const std::vector<U> & v, const value_type & id_elem, const F & f)
		: VERTEX(true), id_elem(id_elem), f(f), hld(g, root, false) {
		assert(hld.size() == static_cast<int>(v.size()));
		std::vector<value_type> init(hld.size());
		for (int i = 0; i < hld.size(); ++i) init[hld.in(i)] = v[i];
		seg = seg_type{init, id_elem, f};
		// Reverse every heavy chain in place to obtain the layout used by rseg.
		for (int i = 0; i < hld.heavy_size(); ++i) {
			const int offset = hld.in(hld.head(i));
			std::reverse(begin(init) + offset, begin(init) + offset + hld.heavy_size(i));
		}
		rseg = seg_type{init, id_elem, f};
	}
	// Access to the underlying decomposition.
	const hld_type & get_hld() const noexcept {
		return hld;  // BUG FIX: used to `return get_hld;` (the member function itself), which is ill-formed
	}
	// Vertex mode: assign x to vertex v.  Theta(log N).
	void set(int v, const_reference x) noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		set_(v, x);
	}
	// Vertex mode: read the value of vertex v.  O(1) (segment-tree leaf read).
	value_type get(int v) const noexcept {
		assert(VERTEX);
		assert(0 <= v && v < hld.size());
		return get_(v);
	}
	// Edge mode: assign x to the edge u-v (u and v must be adjacent).
	void set(int u, int v, const_reference x) noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		set_(hld.par(u) == v ? u : v, x);  // the edge lives at its child endpoint
	}
	// Edge mode: read the value of the edge u-v.
	value_type get(int u, int v) const noexcept {
		assert(!VERTEX);
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.par(u) == v || hld.par(v) == u);
		return get_(hld.par(u) == v ? u : v);
	}
private:
	// Write x at vertex v in both layouts.
	void set_(int v, const_reference x) noexcept {
		seg.set(hld.in(v), x);
		rseg.set(reverse_idx(v), x);
	}
	value_type get_(int v) const noexcept {
		return seg.get(hld.in(v));
	}
	// Position of vertex v inside rseg: its heavy chain occupies the same
	// interval as in seg, but stored back-to-front.
	int reverse_idx(int v) const noexcept {
		const int id = hld.heavy(v);
		return (hld.in(hld.head(id)) << 1) + hld.heavy_size(id) - hld.in(v) - 1;
	}
public:
	// Fold f over the values on the u -> v path, in path order.  O((log N)^2).
	value_type fold(int u, int v) const noexcept {
		assert(0 <= u && u < hld.size());
		assert(0 <= v && v < hld.size());
		assert(hld.tree_id(u) == hld.tree_id(v));
		value_type lv = id_elem, rv = id_elem;  // folds of the u-side (upward) and v-side (downward) fragments
		while (hld.heavy(u) != hld.heavy(v)) {
			// Always advance the endpoint whose chain lies deeper (more light edges).
			if (hld.heavy_depth(hld.heavy(u)) >= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(u));
				lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(head) + 1));  // u..head, in path order
				u = hld.par(head);
			}
			if (hld.heavy(u) == hld.heavy(v)) break;
			if (hld.heavy_depth(hld.heavy(u)) <= hld.heavy_depth(hld.heavy(v))) {
				const int head = hld.head(hld.heavy(v));
				rv = f(seg.fold(hld.in(head), hld.in(v) + 1), rv);  // head..v, in path order
				v = hld.par(head);
			}
		}
		// u and v now share a chain; in edge mode the topmost vertex itself is
		// excluded (the +!VERTEX / +VERTEX offsets drop its slot).
		if (hld.in(u) < hld.in(v)) rv = f(seg.fold(hld.in(u) + !VERTEX, hld.in(v) + 1), rv);
		else lv = f(lv, rseg.fold(reverse_idx(u), reverse_idx(v) + VERTEX));
		return f(lv, rv);
	}
	// Fold f over all values in the subtree of v (f must be commutative).  Theta(log N).
	value_type subtree_sum(int v) const noexcept {
		assert(0 <= v && v < hld.size());
		// A subtree is the contiguous Euler interval [in(v), out(v)); drop v itself in edge mode.
		return seg.fold(hld.in(v) + !VERTEX, hld.out(v));
	}
};