树链剖分,英文名字为heavy path decomposition或heavy-light decomposition。它并不是一个复杂的算法或者数据结构,而是一种对树进行分解的技术。它把树分解为多个不相交的顶点集合,将一棵树的问题拆成不超过O(logn)个链的问题来处理。换句话说,树链剖分只是各种数据结构或算法在树上的推广。诸如,求树上的两个点之间的路径上的权值的最大值、最小值、和等问题均可以使用树链剖分来处理。
树链剖分的第一步当然是对树进行轻重边的划分。定义size[x]为以x为根的子树的节点个数。
重儿子:size[v]为u的子节点中size值最大的,那么v就是u的重儿子。
轻儿子:v的其它子节点。
重边:点v与其重儿子的连边。
轻边:点v与其轻儿子的连边。
重链:由重边连成的路径。
轻链:由轻边连成的路径。
当然,关于这个它有两个重要的性质:
(a)轻边(u,v)中,size[v]*2 <= size[u]
(b)从根到某一点的路径上,不超过logn条轻边和不超过logn条重链。
CodeChef201609问题JTREE
(1)问题描述:
(2)要点:
(3)代码:
unsigned long long slove_fast(const vector< vector >& tickets,const unsigned int* parent,size_t size,unsigned int index,unsigned int ticket)
{
static bool init = false;
static vector quick_results;
if(!init)
{
CTreeAncestor ta; // O(sqrt(n))求出index的offset代祖宗
ta.init(parent,size,1);
unsigned int *out = new unsigned int[size];
size_t outsize = CTreeUtil::from_top_to_bottom(parent,out,size,1); // 从有根树的宽度优先遍历
typedef vhld_segtree_t< paqmin_value_t,paqmin_value_t > CHLDSegTree;
CHLDSegTree st;st.init(parent,size,1);
quick_results.resize(size+1,(unsigned long long)(-1));
quick_results[1] = 0;
st.update(1,0);
for(size_t i = 1;i < outsize;++i)
{
size_t u = out[i];
unsigned long long mincost = 100000000000000000LL;
for(size_t j = 0;j < tickets[u].size();++j)
{
size_t ancestor = ta.ancestor(u,tickets[u][j].ticket);
if(0 == ancestor) ancestor = 1;
unsigned long long ret = st.query(parent[u],ancestor).minv;
unsigned long long minv = ret;
minv += tickets[u][j].cost;
if(minv < mincost) mincost = minv;
}
quick_results[u] = mincost;
st.update(u,mincost);
}
delete[] out;
init = true;
}
return quick_results[index];
}
//
// 显然,对于i < k,有f(n,i) >= f(n,k)
//
// f(n,0) = c(n) + min{ f(n-1,tn-1),f(n-2,tn-2),...,f(n-tn,0) }
// f(n,i) = min{ f(n,0),f(n-1,i-1) } <= f(n,0)
// 因此有,f(n,i) <= f(n-1,i-1) => f(n,0) = c(n) + f(n-1,tn-1)
//
// 可以进一步推导出:
// 若i >= n,则f(n,i) = min { f(n,0),f(n-1,0),f(n-2,i-2) } = ... = min { f(n,0),f(n-1,0),...f(1,0),f(0,i-n) }
// 若i < n,f(n,i) = min { f(n,0),f(n-1,0),f(n-2,i-2) } = ... = min { f(n,0),f(n-1,0),...f(1,0),f(n-i,0) }
int main()
{
static const unsigned int maxn = 100000;
static const unsigned int manm = 100000;
unsigned int parent[maxn+1] = { 0 };
unsigned int n = 0,m = 0;scanf("%d%d",&n,&m);
vector< vector > tickets;
tickets.clear();tickets.resize(n+1);
for(unsigned int i = 1;i < n;++i)
{
unsigned int a = 0,b = 0;scanf("%d%d",&a,&b);
parent[a] = b;
}
for(unsigned int i = 0;i < m;++i)
{
unsigned int v = 0,k = 0,w = 0;scanf("%d%d%d",&v,&k,&w);
ticket_t t;
t.cost = w;t.ticket = k;
tickets[v].push_back(t);
}
unsigned int query = 0;scanf("%d",&query);
for(unsigned int iq = 0;iq < query;++iq)
{
unsigned int v = 0;scanf("%d",&v);
unsigned long long ans = slove_fast(tickets,parent,n+1,v,0);
printf("%llu\n",ans);
}
return 0;
}
HDU3966
SPOJ375
参考文献:
(1)https://en.wikipedia.org/wiki/Heavy_path_decomposition
(2)http://wcipeg.com/wiki/Heavy-light_decomposition
(3)https://blog.anudeep2011.com/heavy-light-decomposition/
(4)https://quartergeek.com/summary-of-heavy-light-decomposition/
(5)http://blog.sina.com.cn/s/blog_7a1746820100wp67.html
(6)http://blog.csdn.net/y990041769/article/details/40348013
(10)http://blog.csdn.net/acdreamers/article/details/10591443