Commit bc0a740

Add Heavy Light Decomposition (rust-lang#310)
1 parent 44ecece commit bc0a740

File tree

4 files changed: +195 -0 lines changed

DIRECTORY.md

Lines changed: 1 addition & 0 deletions
@@ -54,6 +54,7 @@
 * [Prufer Code](https://github.com/TheAlgorithms/Rust/blob/master/src/graph/prufer_code.rs)
 * [Lowest Common Ancestor](https://github.com/TheAlgorithms/Rust/blob/master/src/graph/lowest_common_ancestor.rs)
 * [Disjoint Set Union](https://github.com/TheAlgorithms/Rust/blob/master/src/graph/disjoint_set_union.rs)
+* [Heavy Light Decomposition](https://github.com/TheAlgorithms/Rust/blob/master/src/graph/heavy_light_decomposition.rs)
 * [Tarjan's Strongly Connected Components](https://github.com/TheAlgorithms/Rust/blob/master/src/graph/strongly_connected_components.rs)
 * [Lib](https://github.com/TheAlgorithms/Rust/blob/master/src/lib.rs)
 * Math

README.md

Lines changed: 1 addition & 0 deletions
@@ -37,6 +37,7 @@ These are for demonstration purposes only.
 - [x] [Bellman-Ford](./src/graph/bellman_ford.rs)
 - [x] [Prufer Code](./src/graph/prufer_code.rs)
 - [x] [Lowest Common Ancestor](./src/graph/lowest_common_ancestor.rs)
+- [x] [Heavy Light Decomposition](./src/graph/heavy_light_decomposition.rs)
 - [x] [Tarjan's Strongly Connected Components](./src/graph/strongly_connected_components.rs)
 - [x] [Topological sorting](./src/graph/topological_sort.rs)

src/graph/heavy_light_decomposition.rs (new file)

Lines changed: 191 additions & 0 deletions
/*
Heavy Light Decomposition:
It partitions a tree into disjoint paths such that:
1. Each path is a part of some leaf's path to the root
2. The number of paths from any vertex to the root is O(log(n))
Such a decomposition can be used to answer many types of queries about vertices
or edges on a particular path. It is often combined with a range structure such
as a segment tree or a Fenwick tree to handle different operations on the paths.

Many members of this struct are public, because they can either be supplied by
the developer or be useful to other parts of the code.

The implementation assumes that the tree vertices are numbered from 1 to n and
that the tree is represented as an adjacency list. If this is not the case,
`graph_enumeration.rs` may help.
*/

type Adj = [Vec<usize>];

pub struct HeavyLightDecomposition {
    // Each vertex is assigned a number from 1 to n. For `v` and `u` such that
    // `u` is the parent of `v` and both are on path `p`, it holds that:
    // position[u] = position[v] - 1
    pub position: Vec<usize>,

    // The first (closest to the root) vertex of the path containing each vertex
    pub head: Vec<usize>,

    // The "heaviest" child of each vertex, i.e. the child whose subtree is at
    // least as big as those of its siblings. If `v` is a leaf, big_child[v] = 0
    pub big_child: Vec<usize>,

    // Used internally to fill the `position` Vec
    current_position: usize,
}

impl HeavyLightDecomposition {
    pub fn new(mut num_vertices: usize) -> Self {
        num_vertices += 1;
        HeavyLightDecomposition {
            position: vec![0; num_vertices],
            head: vec![0; num_vertices],
            big_child: vec![0; num_vertices],
            current_position: 1,
        }
    }

    // Computes subtree sizes and records each vertex's heaviest child
    fn dfs(&mut self, v: usize, parent: usize, adj: &Adj) -> usize {
        let mut big_child = 0usize;
        let mut bc_size = 0usize; // big child size
        let mut subtree_size = 1usize; // size of this subtree
        for &u in adj[v].iter() {
            if u == parent {
                continue;
            }
            let u_size = self.dfs(u, v, adj);
            subtree_size += u_size;
            if u_size > bc_size {
                big_child = u;
                bc_size = u_size;
            }
        }
        self.big_child[v] = big_child;
        subtree_size
    }

    pub fn decompose(&mut self, root: usize, adj: &Adj) {
        self.current_position = 1;
        self.dfs(root, 0, adj);
        self.decompose_path(root, 0, root, adj);
    }

    // Assigns `head` and consecutive `position` values along each path: the
    // heavy child continues the current path, every other child starts a new one
    fn decompose_path(&mut self, v: usize, parent: usize, head: usize, adj: &Adj) {
        self.head[v] = head;
        self.position[v] = self.current_position;
        self.current_position += 1;
        let bc = self.big_child[v];
        if bc != 0 {
            // Continue this path
            self.decompose_path(bc, v, head, adj);
        }
        for &u in adj[v].iter() {
            if u == parent || u == bc {
                continue;
            }
            // Start a new path
            self.decompose_path(u, v, u, adj);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    struct LinearCongruenceGenerator {
        // modulus is 2^32 (via the wrapping cast in `next`)
        multiplier: u32,
        increment: u32,
        state: u32,
    }

    impl LinearCongruenceGenerator {
        fn new(multiplier: u32, increment: u32, state: u32) -> Self {
            Self {
                multiplier,
                increment,
                state,
            }
        }
        fn next(&mut self) -> u32 {
            self.state =
                (self.multiplier as u64 * self.state as u64 + self.increment as u64) as u32;
            self.state
        }
    }

    fn get_num_paths(
        hld: &HeavyLightDecomposition,
        mut v: usize,
        parent: &[usize],
    ) -> (usize, usize) {
        // Returns (number of paths, height) for the walk from `v` up to the root
        let mut ans = 0usize;
        let mut height = 0usize;
        let mut prev_head = 0usize;
        loop {
            height += 1;
            let head = hld.head[v];
            if head != prev_head {
                ans += 1;
                prev_head = head;
            }
            v = parent[v];
            if v == 0 {
                break;
            }
        }
        (ans, height)
    }

    #[test]
    fn single_path() {
        let mut adj = vec![vec![], vec![2], vec![3], vec![4], vec![5], vec![6], vec![]];
        let mut hld = HeavyLightDecomposition::new(6);
        hld.decompose(1, &adj);
        assert_eq!(hld.head, vec![0, 1, 1, 1, 1, 1, 1]);
        assert_eq!(hld.position, vec![0, 1, 2, 3, 4, 5, 6]);
        assert_eq!(hld.big_child, vec![0, 2, 3, 4, 5, 6, 0]);

        adj[3].push(2);
        adj[2].push(1);
        hld.decompose(3, &adj);
        assert_eq!(hld.head, vec![0, 2, 2, 3, 3, 3, 3]);
        assert_eq!(hld.position, vec![0, 6, 5, 1, 2, 3, 4]);
        assert_eq!(hld.big_child, vec![0, 0, 1, 4, 5, 6, 0]);
    }

    #[test]
    fn random_tree() {
        // Let it have 1e4 vertices. It should finish under 100ms even with
        // 1e5 vertices
        let n = 1e4 as usize;
        let threshold = 14; // 2^14 = 16384 > n
        let mut adj: Vec<Vec<usize>> = vec![vec![]; n + 1];
        let mut parent: Vec<usize> = vec![0; n + 1];
        let mut hld = HeavyLightDecomposition::new(n);
        let mut lcg = LinearCongruenceGenerator::new(1103515245, 12345, 314);
        parent[2] = 1;
        adj[1].push(2);
        for i in 3..=n {
            // Randomly determine the parent of each vertex.
            // There will be modulus bias, but it isn't important here
            let par_max = i - 1;
            let par_min = (10 * par_max + 1) / 11;
            // Bring par_min closer to par_max to increase the expected tree height
            let par = (lcg.next() as usize % (par_max - par_min + 1)) + par_min;
            adj[par].push(i);
            parent[i] = par;
        }
        // Collect a few leaves
        let leaves: Vec<usize> = (1..=n)
            .rev()
            .filter(|&v| adj[v].is_empty())
            .take(100)
            .collect();
        hld.decompose(1, &adj);
        for l in leaves {
            let (p, _h) = get_num_paths(&hld, l, &parent);
            assert!(p <= threshold);
        }
    }
}
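
As the file's header comment notes, the decomposition is usually paired with a range structure (segment tree or Fenwick tree) indexed by `position`. The sketch below is not part of the commit; it only illustrates how the `head` and `position` arrays would be used to split an arbitrary u-v path into O(log n) contiguous position ranges. The `parent` and `depth` arrays and the `visit` callback are assumed to be supplied by the caller, since the struct itself stores only `position`, `head`, and `big_child`.

// Hypothetical helper, not part of the commit: walks the u-v path chain by
// chain and reports each contiguous, inclusive `position` range to `visit`.
// `parent` and `depth` are assumed to be precomputed by the caller.
fn for_each_path_segment<F: FnMut(usize, usize)>(
    hld: &HeavyLightDecomposition,
    parent: &[usize],
    depth: &[usize],
    mut u: usize,
    mut v: usize,
    mut visit: F,
) {
    // While the endpoints lie on different paths, jump the one with the deeper
    // head upward; each jump covers a prefix of one heavy path.
    while hld.head[u] != hld.head[v] {
        if depth[hld.head[u]] < depth[hld.head[v]] {
            std::mem::swap(&mut u, &mut v);
        }
        visit(hld.position[hld.head[u]], hld.position[u]);
        u = parent[hld.head[u]];
    }
    // Both endpoints are now on the same heavy path, where positions are
    // consecutive, so a single range finishes the query.
    let (lo, hi) = if hld.position[u] < hld.position[v] {
        (hld.position[u], hld.position[v])
    } else {
        (hld.position[v], hld.position[u])
    };
    visit(lo, hi);
}

Feeding each reported range to a segment tree or Fenwick tree built over `position` would give O(log^2 n) path queries.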

src/graph/mod.rs

Lines changed: 2 additions & 0 deletions
@@ -5,6 +5,7 @@ mod depth_first_search_tic_tac_toe;
 mod dijkstra;
 mod disjoint_set_union;
 mod graph_enumeration;
+mod heavy_light_decomposition;
 mod lowest_common_ancestor;
 mod minimum_spanning_tree;
 mod prim;
@@ -19,6 +20,7 @@ pub use self::depth_first_search_tic_tac_toe::minimax;
 pub use self::dijkstra::dijkstra;
 pub use self::disjoint_set_union::DisjointSetUnion;
 pub use self::graph_enumeration::enumerate_graph;
+pub use self::heavy_light_decomposition::HeavyLightDecomposition;
 pub use self::lowest_common_ancestor::{LowestCommonAncestorOffline, LowestCommonAncestorOnline};
 pub use self::minimum_spanning_tree::kruskal;
 pub use self::prim::{prim, prim_with_start};
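
With the re-export above, the type can be used from elsewhere in the crate roughly as follows. This is a minimal sketch: the `crate::graph` path is an assumption based on this module tree, and the tiny 1-indexed adjacency list (index 0 unused) mirrors the `single_path` test.

use crate::graph::HeavyLightDecomposition;

// Path graph 1 - 2 - 3, stored with both edge directions
let adj = vec![vec![], vec![2], vec![1, 3], vec![2]];
let mut hld = HeavyLightDecomposition::new(3);
hld.decompose(1, &adj);
// All three vertices end up on one heavy path headed by the root,
// with consecutive positions along it
assert_eq!(hld.head, vec![0, 1, 1, 1]);
assert_eq!(hld.position, vec![0, 1, 2, 3]);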
