Aleph-w 3.0
A C++ Library for Data Structures and Algorithms
Loading...
Searching...
No Matches
Tree_Decomposition.H
Go to the documentation of this file.
1/*
2 Aleph_w
3
4 Data structures & Algorithms
5 version 2.0.0b
6 https://github.com/lrleon/Aleph-w
7
8 This file is part of Aleph-w library
9
10 Copyright (c) 2002-2026 Leandro Rabindranath Leon
11
12 Permission is hereby granted, free of charge, to any person obtaining a copy
13 of this software and associated documentation files (the "Software"), to deal
14 in the Software without restriction, including without limitation the rights
15 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
16 copies of the Software, and to permit persons to whom the Software is
17 furnished to do so, subject to the following conditions:
18
19 The above copyright notice and this permission notice shall be included in all
20 copies or substantial portions of the Software.
21
22 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
23 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
24 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
25 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
26 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
27 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
28 SOFTWARE.
29*/
30
31
111# ifndef TREE_DECOMPOSITION_H
112# define TREE_DECOMPOSITION_H
113
114# include <algorithm>
115# include <concepts>
116# include <cstddef>
117# include <limits>
118# include <type_traits>
119# include <utility>
120
121# include <ah-errors.H>
122# include <ahFunction.H>
123# include <tpl_array.H>
124# include <tpl_dynListStack.H>
125# include <tpl_dynMapOhash.H>
126# include <tpl_dynMapTree.H>
127# include <tpl_graph.H>
128# include <tpl_segment_tree.H>
129
130namespace Aleph
131{
132 namespace tree_decomposition_detail
133 {
/// Inclusive interval [left, right] of positions in a linearized tree layout.
struct Id_Range
{
  size_t left = 0;   ///< first covered position (inclusive)
  size_t right = 0;  ///< last covered position (inclusive)
};
142
151 template <class GT, class SA>
153 {
154 public:
155 using Node = typename GT::Node;
156 using Arc = typename GT::Arc;
157
158 static constexpr size_t NONE = std::numeric_limits<size_t>::max();
159
160 private:
161 using Pair_Key = std::pair<size_t, size_t>;
162
163 const GT *graph_ = nullptr;
165 Node *root_ = nullptr;
166 size_t root_id_ = NONE;
167
168 size_t n_ = 0;
169
173
174 static Pair_Key normalize_pair(size_t u, size_t v) noexcept
175 {
176 if (u > v)
177 std::swap(u, v);
178 return std::make_pair(u, v);
179 }
180
181 void check_id(const size_t id, const char *where) const
182 {
184 << where << ": id=" << id << " is out of range [0, " << n_ << ")";
185 }
186
188 {
190
191 if (n_ == 0)
192 {
193 ah_domain_error_if(root_ != nullptr)
194 << "Rooted_Tree_Topology: root node provided but graph is empty";
195 return;
196 }
197
200 node_to_id_.swap(tmp);
201 }
202
203 size_t next_id = 0;
204 for (Node_Iterator<GT> it(*graph_); it.has_curr(); it.next_ne())
205 {
206 Node *p = it.get_curr();
207 id_to_node_(next_id) = p;
209 ++next_id;
210 }
211
213 << "Rooted_Tree_Topology: failed to index all graph nodes";
214
215 if (root_ == nullptr)
216 root_ = id_to_node_(0);
217
219 << "Rooted_Tree_Topology: root node does not belong to graph";
220
222 }
223
225 {
226 if (n_ == 0)
227 return;
228
229 adjacency_.empty();
230 adjacency_.reserve(n_);
231 for (size_t i = 0; i < n_; ++i)
232 adjacency_.append(Array<size_t>());
233
235 size_t edge_count = 0;
236
237 for (Arc_Iterator<GT, SA> it(*graph_, sa_); it.has_curr(); it.next_ne())
238 {
239 Arc *a = it.get_curr_ne();
240 Node *src = graph_->get_src_node(a);
241 Node *tgt = graph_->get_tgt_node(a);
242
243 const auto *src_item = node_to_id_.search(src);
244 const auto *tgt_item = node_to_id_.search(tgt);
245
246 ah_runtime_error_unless(src_item != nullptr and tgt_item != nullptr)
247 << "Rooted_Tree_Topology: arc endpoint is not indexed";
248
249 const size_t u = src_item->second;
250 const size_t v = tgt_item->second;
251
252 ah_domain_error_if(u == v)
253 << "Rooted_Tree_Topology: self-loop detected in filtered graph";
254
255 const Pair_Key key = normalize_pair(u, v);
256 ah_domain_error_if(unique_edges.search(key) != nullptr)
257 << "Rooted_Tree_Topology: parallel/duplicate edge detected in filtered graph";
258
259 unique_edges.insert(key, 1);
260 ++edge_count;
261 }
262
264 << "Rooted_Tree_Topology: filtered graph is not a tree (expected "
265 << (n_ - 1) << " edges, got " << edge_count << ")";
266
268 it.has_curr(); it.next_ne())
269 {
270 const auto & [fst, snd] = it.get_curr();
271 (void) snd;
272
273 const size_t u = fst.first;
274 const size_t v = fst.second;
275 adjacency_(u).append(v);
276 adjacency_(v).append(u);
277 }
278 }
279
281 {
282 if (n_ == 0)
283 return;
284
286 for (size_t i = 0; i < n_; ++i)
287 visited(i) = 0;
288
289 struct Frame
290 {
291 size_t node;
292 size_t parent;
293 size_t next_child;
294 };
295
297 visited(root_id_) = 1;
298 size_t visited_count = 1;
299 stack.push({root_id_, NONE, 0});
300
301 while (not stack.is_empty())
302 {
303 auto & fr = stack.top();
304
305 if (fr.next_child == adjacency_(fr.node).size())
306 {
307 (void) stack.pop();
308 continue;
309 }
310
311 const size_t nxt = adjacency_(fr.node)(fr.next_child++);
312 if (nxt == fr.parent)
313 continue;
314
315 ah_domain_error_if(visited(nxt))
316 << "Rooted_Tree_Topology: filtered graph is not acyclic";
317
318 visited(nxt) = 1;
320 stack.push({nxt, fr.node, 0});
321 }
322
324 << "Rooted_Tree_Topology: filtered graph is not connected";
325 }
326
327 public:
328 Rooted_Tree_Topology(const GT & g, Node *root, SA sa = SA())
329 : graph_(&g), sa_(std::move(sa)), root_(root)
330 {
331 index_nodes();
334 }
335
336 [[nodiscard]] size_t size() const noexcept { return n_; }
337 [[nodiscard]] bool is_empty() const noexcept { return n_ == 0; }
338 [[nodiscard]] Node * root() const noexcept { return root_; }
339 [[nodiscard]] size_t root_id() const noexcept { return root_id_; }
340
342 {
343 return id_to_node_;
344 }
345
347 {
348 return adjacency_;
349 }
350
351 [[nodiscard]] size_t id_of(const Node *node) const
352 {
353 ah_invalid_argument_if(node == nullptr)
354 << "Rooted_Tree_Topology::id_of: null node";
355
356 const auto *item = node_to_id_.search(const_cast<Node *>(node));
357 ah_domain_error_if(item == nullptr)
358 << "Rooted_Tree_Topology::id_of: node does not belong to graph";
359
360 return item->second;
361 }
362
363 [[nodiscard]] Node * node_of(const size_t id) const
364 {
365 check_id(id, "Rooted_Tree_Topology::node_of");
366 return id_to_node_(id);
367 }
368
369 void validate_id(const size_t id, const char *where) const
370 {
371 check_id(id, where);
372 }
373 };
374 } // namespace tree_decomposition_detail
375
376
391 template <class GT, class SA = Dft_Show_Arc<GT>>
393 {
394 public:
395 using Node = typename GT::Node;
397
398 private:
400 static constexpr size_t NONE = Topology::NONE;
401
403
411
412 void ensure_not_empty(const char *where) const
413 {
414 ah_domain_error_if(is_empty()) << where << ": tree is empty";
415 }
416
418 {
419 const size_t n = size();
420 if (n == 0)
421 return;
422
430
431 for (size_t i = 0; i < n; ++i)
432 {
433 parent_(i) = NONE;
434 depth_(i) = 0;
435 subtree_size_(i) = 0;
436 heavy_(i) = NONE;
437 head_(i) = NONE;
438 pos_(i) = NONE;
439 inv_pos_(i) = NONE;
440 }
441
442 Array<char> visited = Array<char>::create(n);
443 for (size_t i = 0; i < n; ++i)
444 visited(i) = 0;
445
446 Array<size_t> order;
447 order.reserve(n);
448
449 struct Frame
450 {
451 size_t node;
452 size_t parent;
453 size_t next_child;
454 };
455
456 const size_t root_id = topology_.root_id();
458 depth_(root_id) = 0;
459 visited(root_id) = 1;
460
461 size_t visited_count = 1;
462 order.append(root_id);
463
465 stack.push({root_id, NONE, 0});
466
467 while (not stack.is_empty())
468 {
469 auto & fr = stack.top();
470
471 if (fr.next_child == topology_.adjacency()(fr.node).size())
472 {
473 (void) stack.pop();
474 continue;
475 }
476
477 const size_t nxt = topology_.adjacency()(fr.node)(fr.next_child++);
478 if (nxt == fr.parent)
479 continue;
480
481 ah_domain_error_if(visited(nxt))
482 << "Gen_Heavy_Light_Decomposition: filtered graph is not acyclic";
483
484 visited(nxt) = 1;
486
487 parent_(nxt) = fr.node;
488 depth_(nxt) = depth_(fr.node) + 1;
489 order.append(nxt);
490
491 stack.push({nxt, fr.node, 0});
492 }
493
495 << "Gen_Heavy_Light_Decomposition: filtered graph is not connected";
496
497 for (size_t i = order.size(); i-- > 0;)
498 {
499 const size_t u = order(i);
500 subtree_size_(u) = 1;
501 heavy_(u) = NONE;
502
503 for (size_t j = 0; j < topology_.adjacency()(u).size(); ++j)
504 {
505 const size_t v = topology_.adjacency()(u)(j);
506 if (parent_(v) != u)
507 continue;
508
510 if (heavy_(u) == NONE or subtree_size_(v) > subtree_size_(heavy_(u)))
511 heavy_(u) = v;
512 }
513 }
514
515 size_t next_pos = 0;
518
519 while (not chain_stack.is_empty())
520 {
521 const auto [chain_head, chain_start] = chain_stack.pop();
522
523 size_t u = chain_start;
524 while (u != NONE)
525 {
526 head_(u) = chain_head;
527 pos_(u) = next_pos;
528 inv_pos_(next_pos) = u;
529 ++next_pos;
530
531 for (size_t j = 0; j < topology_.adjacency()(u).size(); ++j)
532 if (const size_t v = topology_.adjacency()(u)(j); parent_(v) == u and v != heavy_(u))
533 chain_stack.push({v, v});
534
535 u = heavy_(u);
536 }
537 }
538
540 << "Gen_Heavy_Light_Decomposition: invalid decomposition size";
541 }
542
543 public:
551 : topology_(g, root, std::move(sa))
552 {
553 build_decomposition();
554 }
555
562 : topology_(g, nullptr, std::move(sa))
563 {
564 build_decomposition();
565 }
566
567 [[nodiscard]] size_t size() const noexcept { return topology_.size(); }
568 [[nodiscard]] bool is_empty() const noexcept { return topology_.is_empty(); }
569
570 [[nodiscard]] Node * root() const noexcept { return topology_.root(); }
571 [[nodiscard]] size_t root_id() const noexcept { return topology_.root_id(); }
572
573 [[nodiscard]] Node * node_of(const size_t id) const
574 {
575 return topology_.node_of(id);
576 }
577
578 [[nodiscard]] size_t id_of(const Node *node) const
579 {
580 return topology_.id_of(node);
581 }
582
583 [[nodiscard]] size_t parent_id(const size_t id) const
584 {
585 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::parent_id");
586 return parent_(id);
587 }
588
589 [[nodiscard]] Node * parent_of(const Node *node) const
590 {
591 const size_t pid = parent_id(id_of(node));
592 return pid == NONE ? nullptr : node_of(pid);
593 }
594
595 [[nodiscard]] size_t depth_of_id(const size_t id) const
596 {
597 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::depth_of_id");
598 return depth_(id);
599 }
600
601 [[nodiscard]] size_t depth_of(const Node *node) const
602 {
603 return depth_of_id(id_of(node));
604 }
605
606 [[nodiscard]] size_t subtree_size_of_id(const size_t id) const
607 {
608 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::subtree_size_of_id");
609 return subtree_size_(id);
610 }
611
612 [[nodiscard]] size_t subtree_size_of(const Node *node) const
613 {
614 return subtree_size_of_id(id_of(node));
615 }
616
617 [[nodiscard]] size_t heavy_child_id(const size_t id) const
618 {
619 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::heavy_child_id");
620 return heavy_(id);
621 }
622
623 [[nodiscard]] Node * heavy_child(const Node *node) const
624 {
625 const size_t cid = heavy_child_id(id_of(node));
626 return cid == NONE ? nullptr : node_of(cid);
627 }
628
629 [[nodiscard]] size_t head_id(const size_t id) const
630 {
631 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::head_id");
632 return head_(id);
633 }
634
635 [[nodiscard]] Node * head_of(const Node *node) const
636 {
637 return node_of(head_id(id_of(node)));
638 }
639
640 [[nodiscard]] size_t position_of_id(const size_t id) const
641 {
642 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::position_of_id");
643 return pos_(id);
644 }
645
646 [[nodiscard]] size_t position_of(const Node *node) const
647 {
648 return position_of_id(id_of(node));
649 }
650
652 [[nodiscard]] Range subtree_range_id(const size_t id) const
653 {
654 topology_.validate_id(id, "Gen_Heavy_Light_Decomposition::subtree_range_id");
655 const size_t l = pos_(id);
656 const size_t r = l + subtree_size_(id) - 1;
657 return Range{l, r};
658 }
659
661 [[nodiscard]] Range subtree_range(const Node *node) const
662 {
663 return subtree_range_id(id_of(node));
664 }
665
667 [[nodiscard]] bool is_ancestor_id(const size_t u, const size_t v) const
668 {
669 const Range r = subtree_range_id(u);
670 const size_t pv = position_of_id(v);
671 return r.left <= pv and pv <= r.right;
672 }
673
675 [[nodiscard]] bool is_ancestor(const Node *u, const Node *v) const
676 {
677 return is_ancestor_id(id_of(u), id_of(v));
678 }
679
681 [[nodiscard]] size_t lca_id(size_t u, size_t v) const
682 {
683 ensure_not_empty("Gen_Heavy_Light_Decomposition::lca_id");
684 topology_.validate_id(u, "Gen_Heavy_Light_Decomposition::lca_id");
685 topology_.validate_id(v, "Gen_Heavy_Light_Decomposition::lca_id");
686
687 while (head_(u) != head_(v))
688 if (depth_(head_(u)) >= depth_(head_(v)))
689 u = parent_(head_(u));
690 else
691 v = parent_(head_(v));
692
693 return (depth_(u) <= depth_(v)) ? u : v;
694 }
695
697 [[nodiscard]] Node * lca(const Node *u, const Node *v) const
698 {
699 return node_of(lca_id(id_of(u), id_of(v)));
700 }
701
703 [[nodiscard]] size_t distance_id(const size_t u, const size_t v) const
704 {
705 const size_t a = lca_id(u, v);
706 return depth_(u) + depth_(v) - 2 * depth_(a);
707 }
708
710 [[nodiscard]] size_t distance(const Node *u, const Node *v) const
711 {
712 return distance_id(id_of(u), id_of(v));
713 }
714
725 template <class F>
726 void for_each_path_segment_id(size_t u, size_t v, F && visit) const
727 {
728 ensure_not_empty("Gen_Heavy_Light_Decomposition::for_each_path_segment_id");
729 topology_.validate_id(u, "Gen_Heavy_Light_Decomposition::for_each_path_segment_id");
730 topology_.validate_id(v, "Gen_Heavy_Light_Decomposition::for_each_path_segment_id");
731
733
734 while (head_(u) != head_(v))
735 if (depth_(head_(u)) >= depth_(head_(v)))
736 {
737 visit(pos_(head_(u)), pos_(u), true);
738 u = parent_(head_(u));
739 }
740 else
741 {
742 down_segments.push(Range{pos_(head_(v)), pos_(v)});
743 v = parent_(head_(v));
744 }
745
746 if (depth_(u) >= depth_(v))
747 visit(pos_(v), pos_(u), true);
748 else
749 down_segments.push(Range{pos_(u), pos_(v)});
750
751 while (not down_segments.is_empty())
752 {
753 const auto [left, right] = down_segments.pop();
754 visit(left, right, false);
755 }
756 }
757
759 template <class F>
760 void for_each_path_segment(const Node *u, const Node *v, F && visit) const
761 {
762 for_each_path_segment_id(id_of(u), id_of(v), std::forward<F>(visit));
763 }
764
769 template <class F>
771 const size_t v,
772 F && visit) const
773 {
774 for_each_path_segment_id(u, v,
775 [&](const size_t l, const size_t r, const bool)
776 {
777 visit(l, r);
778 });
779 }
780 };
781
782
807 template <class GT,
808 typename T = typename GT::Node::Node_Type,
809 class Op = Aleph::plus<T>,
810 class SA = Dft_Show_Arc<GT>>
812 {
813 public:
814 using Node = typename GT::Node;
815
816 private:
820 Op op_;
821
822 void ensure_not_empty(const char *where) const
823 {
824 ah_domain_error_if(is_empty()) << where << ": tree is empty";
825 }
826
827 template <class Getter>
828 requires std::invocable<Getter, Node *> and
829 std::convertible_to<std::invoke_result_t<Getter, Node *>, T>
832 {
833 const size_t n = hld.size();
834 if (n == 0)
835 return Array<T>();
836
837 auto base = Array<T>::create(n);
838 for (size_t id = 0; id < n; ++id)
839 {
840 const size_t p = hld.position_of_id(id);
841 base(p) = static_cast<T>(getter(hld.node_of(id)));
842 }
843
844 return base;
845 }
846
848 {
849 return build_base_array(hld,
850 [](Node *p) -> T
851 {
852 return static_cast<T>(p->get_info());
853 });
854 }
855
856 public:
859 Node *root,
860 const T & identity,
861 Op op = Op(),
862 SA sa = SA())
863 : hld_(g, root, std::move(sa)),
865 identity_(identity),
866 op_(op)
867 {
868 // Empty
869 }
870
873 const T & identity,
874 Op op = Op(),
875 SA sa = SA())
876 : hld_(g, std::move(sa)),
878 identity_(identity),
879 op_(op)
880 {
881 // Empty
882 }
883
885 template <class Getter>
886 requires std::invocable<Getter, Node *> and
887 std::convertible_to<std::invoke_result_t<Getter, Node *>, T>
889 Node *root,
891 const T & identity,
892 Op op = Op(),
893 SA sa = SA())
894 : hld_(g, root, std::move(sa)),
895 segment_tree_(build_base_array(hld_, getter), identity, op),
896 identity_(identity),
897 op_(op)
898 {
899 // Empty
900 }
901
903 template <class Getter>
904 requires std::invocable<Getter, Node *> and
905 std::convertible_to<std::invoke_result_t<Getter, Node *>, T>
908 const T & identity,
909 Op op = Op(),
910 SA sa = SA())
911 : hld_(g, std::move(sa)),
912 segment_tree_(build_base_array(hld_, getter), identity, op),
913 identity_(identity),
914 op_(op)
915 {
916 // Empty
917 }
918
923
924 [[nodiscard]] size_t size() const noexcept { return hld_.size(); }
925 [[nodiscard]] bool is_empty() const noexcept { return hld_.is_empty(); }
926
928 [[nodiscard]] T query_path_id(const size_t u, const size_t v) const
929 {
930 ensure_not_empty("Gen_HLD_Path_Query::query_path_id");
931
932 T ret = identity_;
933 hld_.for_each_path_segment_id(u, v,
934 [&](const size_t l,
935 const size_t r,
936 const bool)
937 {
938 ret = op_(ret, segment_tree_.query(l, r));
939 });
940 return ret;
941 }
942
944 [[nodiscard]] T query_path(const Node *u, const Node *v) const
945 {
946 return query_path_id(hld_.id_of(u), hld_.id_of(v));
947 }
948
950 [[nodiscard]] T query_subtree_id(const size_t id) const
951 {
952 ensure_not_empty("Gen_HLD_Path_Query::query_subtree_id");
953 const auto range = hld_.subtree_range_id(id);
954 return segment_tree_.query(range.left, range.right);
955 }
956
958 [[nodiscard]] T query_subtree(const Node *node) const
959 {
960 return query_subtree_id(hld_.id_of(node));
961 }
962
964 void update_node_id(const size_t id, const T & delta)
965 {
966 ensure_not_empty("Gen_HLD_Path_Query::update_node_id");
967 segment_tree_.update(hld_.position_of_id(id), delta);
968 }
969
971 void update_node(const Node *node, const T & delta)
972 {
973 update_node_id(hld_.id_of(node), delta);
974 }
975
977 void set_node_id(const size_t id, const T & value)
978 {
979 ensure_not_empty("Gen_HLD_Path_Query::set_node_id");
980 segment_tree_.set(hld_.position_of_id(id), value);
981 }
982
984 void set_node(const Node *node, const T & value)
985 {
986 set_node_id(hld_.id_of(node), value);
987 }
988
990 [[nodiscard]] T get_node_id(const size_t id) const
991 {
992 ensure_not_empty("Gen_HLD_Path_Query::get_node_id");
993 return segment_tree_.get(hld_.position_of_id(id));
994 }
995
997 [[nodiscard]] T get_node(const Node *node) const
998 {
999 return get_node_id(hld_.id_of(node));
1000 }
1001 };
1002
1003
1022 template <class GT, class SA = Dft_Show_Arc<GT>>
1024 {
1025 public:
1026 using Node = typename GT::Node;
1027
1028 private:
1030 static constexpr size_t NONE = Topology::NONE;
1031
1033
1038
1041
1044
1045 void ensure_not_empty(const char *where) const
1046 {
1047 ah_domain_error_if(is_empty()) << where << ": tree is empty";
1048 }
1049
1051 {
1052 if (is_empty())
1053 return;
1054
1055 const size_t n = size();
1063
1064 for (size_t i = 0; i < n; ++i)
1065 {
1066 removed_(i) = 0;
1068 centroid_level_(i) = 0;
1069 local_parent_(i) = NONE;
1070 local_subtree_(i) = 0;
1073 }
1074
1076 }
1077
1079 {
1082
1083 local_parent_(start) = NONE;
1084 stack.push(start);
1085
1086 while (not stack.is_empty())
1087 {
1088 const size_t u = stack.pop();
1089 nodes.append(u);
1090
1091 for (size_t i = 0; i < topology_.adjacency()(u).size(); ++i)
1092 {
1093 const size_t v = topology_.adjacency()(u)(i);
1094 if (removed_(v))
1095 continue;
1096
1097 if (v == local_parent_(u))
1098 continue;
1099
1100 local_parent_(v) = u;
1101 stack.push(v);
1102 }
1103 }
1104
1105 return nodes;
1106 }
1107
1109 {
1110 const size_t total = nodes.size();
1112 << "Gen_Centroid_Decomposition: empty component while choosing centroid";
1113
1114 for (size_t i = total; i-- > 0;)
1115 {
1116 const size_t u = nodes(i);
1117 local_subtree_(u) = 1;
1118
1119 for (size_t j = 0; j < topology_.adjacency()(u).size(); ++j)
1120 {
1121 const size_t v = topology_.adjacency()(u)(j);
1122 if (removed_(v))
1123 continue;
1124
1125 if (local_parent_(v) == u)
1127 }
1128 }
1129
1130 size_t centroid = nodes(0);
1131 size_t best_balance = total + 1;
1132
1133 for (size_t i = 0; i < total; ++i)
1134 {
1135 const size_t u = nodes(i);
1136
1137 size_t max_part = total - local_subtree_(u);
1138 for (size_t j = 0; j < topology_.adjacency()(u).size(); ++j)
1139 {
1140 const size_t v = topology_.adjacency()(u)(j);
1141 if (removed_(v))
1142 continue;
1143
1144 if (local_parent_(v) == u)
1145 max_part = std::max(max_part, local_subtree_(v));
1146 }
1147
1148 if (max_part < best_balance)
1149 {
1151 centroid = u;
1152 }
1153 }
1154
1155 return centroid;
1156 }
1157
1158 void append_centroid_annotations(const size_t centroid)
1159 {
1160 struct Frame
1161 {
1162 size_t node;
1163 size_t parent;
1164 size_t dist;
1165 };
1166
1167 DynListStack<Frame> stack;
1168 stack.push({centroid, NONE, 0});
1169
1170 while (not stack.is_empty())
1171 {
1172 const Frame fr = stack.pop();
1173 centroid_ancestors_(fr.node).append(centroid);
1174 centroid_distances_(fr.node).append(fr.dist);
1175
1176 for (size_t i = 0; i < topology_.adjacency()(fr.node).size(); ++i)
1177 {
1178 const size_t v = topology_.adjacency()(fr.node)(i);
1179 if (removed_(v) or v == fr.parent)
1180 continue;
1181
1182 stack.push({v, fr.node, fr.dist + 1});
1183 }
1184 }
1185 }
1186
1188 {
1189 const size_t n = size();
1190 if (n == 0)
1191 return;
1192
1193 init_storage();
1194
1195 struct Task
1196 {
1197 size_t start;
1198 size_t parent_centroid;
1199 size_t level;
1200 };
1201
1202 DynListStack<Task> tasks;
1203 tasks.push({topology_.root_id(), NONE, 0});
1204
1205 size_t centroid_count = 0;
1206
1207 while (not tasks.is_empty())
1208 {
1209 const Task task = tasks.pop();
1210 if (removed_(task.start))
1211 continue;
1212
1214 const size_t centroid = choose_centroid(nodes);
1215
1217
1218 centroid_parent_(centroid) = task.parent_centroid;
1219 centroid_level_(centroid) = task.level;
1220 if (task.parent_centroid == NONE)
1221 centroid_root_ = centroid;
1222
1223 removed_(centroid) = 1;
1225
1226 for (size_t i = 0; i < topology_.adjacency()(centroid).size(); ++i)
1227 if (const size_t v = topology_.adjacency()(centroid)(i); not removed_(v))
1228 tasks.push({v, centroid, task.level + 1});
1229 }
1230
1232 << "Gen_Centroid_Decomposition: invalid centroid tree size";
1233 }
1234
1235 public:
1238 : topology_(g, root, std::move(sa))
1239 {
1240 build_centroid_tree();
1241 }
1242
1245 : topology_(g, nullptr, std::move(sa))
1246 {
1247 build_centroid_tree();
1248 }
1249
1250 [[nodiscard]] size_t size() const noexcept { return topology_.size(); }
1251 [[nodiscard]] bool is_empty() const noexcept { return topology_.is_empty(); }
1252
1253 [[nodiscard]] Node * root() const noexcept { return topology_.root(); }
1254 [[nodiscard]] size_t root_id() const noexcept { return topology_.root_id(); }
1255
1256 [[nodiscard]] Node * node_of(const size_t id) const
1257 {
1258 return topology_.node_of(id);
1259 }
1260
1261 [[nodiscard]] size_t id_of(const Node *node) const
1262 {
1263 return topology_.id_of(node);
1264 }
1265
1268 {
1269 return centroid_root_;
1270 }
1271
1274 {
1275 return centroid_root_ == NONE ? nullptr : node_of(centroid_root_);
1276 }
1277
1279 [[nodiscard]] size_t centroid_parent_id(const size_t id) const
1280 {
1281 topology_.validate_id(id, "Gen_Centroid_Decomposition::centroid_parent_id");
1282 return centroid_parent_(id);
1283 }
1284
1286 [[nodiscard]] Node * centroid_parent(const Node *node) const
1287 {
1288 const size_t pid = centroid_parent_id(id_of(node));
1289 return pid == NONE ? nullptr : node_of(pid);
1290 }
1291
1293 [[nodiscard]] size_t centroid_level_of_id(const size_t id) const
1294 {
1295 topology_.validate_id(id, "Gen_Centroid_Decomposition::centroid_level_of_id");
1296 return centroid_level_(id);
1297 }
1298
1300 [[nodiscard]] size_t centroid_level_of(const Node *node) const
1301 {
1302 return centroid_level_of_id(id_of(node));
1303 }
1304
1307 {
1308 size_t ret = 0;
1309 for (size_t i = 0; i < size(); ++i)
1310 ret = std::max(ret, centroid_level_(i));
1311 return ret;
1312 }
1313
1315 [[nodiscard]] size_t centroid_path_length_of_id(const size_t id) const
1316 {
1317 topology_.validate_id(id, "Gen_Centroid_Decomposition::centroid_path_length_of_id");
1318 return centroid_ancestors_(id).size();
1319 }
1320
1322 [[nodiscard]] size_t centroid_path_length_of(const Node *node) const
1323 {
1324 return centroid_path_length_of_id(id_of(node));
1325 }
1326
1328 [[nodiscard]] const Array<size_t> &centroid_ancestors_of_id(const size_t id) const
1329 {
1330 topology_.validate_id(id, "Gen_Centroid_Decomposition::centroid_ancestors_of_id");
1331 return centroid_ancestors_(id);
1332 }
1333
1335 [[nodiscard]] const Array<size_t> &centroid_distances_of_id(const size_t id) const
1336 {
1337 topology_.validate_id(id, "Gen_Centroid_Decomposition::centroid_distances_of_id");
1338 return centroid_distances_(id);
1339 }
1340
1343 {
1344 return centroid_ancestors_of_id(id_of(node));
1345 }
1346
1349 {
1350 return centroid_distances_of_id(id_of(node));
1351 }
1352
1355 const size_t node) const
1356 {
1357 topology_.validate_id(ancestor, "Gen_Centroid_Decomposition::is_centroid_ancestor_id");
1358 topology_.validate_id(node, "Gen_Centroid_Decomposition::is_centroid_ancestor_id");
1359
1360 const auto & chain = centroid_ancestors_(node);
1361 for (size_t i = 0; i < chain.size(); ++i)
1362 if (chain(i) == ancestor)
1363 return true;
1364
1365 return false;
1366 }
1367
1372 [[nodiscard]] size_t distance_to_centroid_id(const size_t node,
1373 const size_t centroid) const
1374 {
1375 topology_.validate_id(node, "Gen_Centroid_Decomposition::distance_to_centroid_id");
1376 topology_.validate_id(centroid, "Gen_Centroid_Decomposition::distance_to_centroid_id");
1377
1378 const auto & chain = centroid_ancestors_(node);
1379 const auto & dists = centroid_distances_(node);
1380
1381 for (size_t i = 0; i < chain.size(); ++i)
1382 if (chain(i) == centroid)
1383 return dists(i);
1384
1385 return NONE;
1386 }
1387
1392 template <class F>
1393 void for_each_centroid_ancestor_id(const size_t node, F && f) const
1394 {
1395 ensure_not_empty("Gen_Centroid_Decomposition::for_each_centroid_ancestor_id");
1396 topology_.validate_id(node, "Gen_Centroid_Decomposition::for_each_centroid_ancestor_id");
1397
1398 const auto & chain = centroid_ancestors_(node);
1399 const auto & dists = centroid_distances_(node);
1400
1401 ah_runtime_error_unless(chain.size() == dists.size())
1402 << "Gen_Centroid_Decomposition: invalid centroid annotation state";
1403
1404 for (size_t i = 0; i < chain.size(); ++i)
1405 f(chain(i), dists(i), i);
1406 }
1407
1412 template <class F>
1413 void for_each_centroid_ancestor(const Node *node, F && f) const
1414 {
1415 for_each_centroid_ancestor_id(id_of(node), std::forward<F>(f));
1416 }
1417 };
1418
1419
1421 template <class GT, class SA = Dft_Show_Arc<GT>>
1423
1425 template <class GT,
1426 typename T = typename GT::Node::Node_Type,
1427 class Op = Aleph::plus<T>,
1428 class SA = Dft_Show_Arc<GT>>
1430
1432 template <class GT, class SA = Dft_Show_Arc<GT>>
1434} // namespace Aleph
1435
1436# endif // TREE_DECOMPOSITION_H
Exception handling system with formatted messages for Aleph-w.
#define ah_runtime_error_unless(C)
Throws std::runtime_error if condition does NOT hold.
Definition ah-errors.H:250
#define ah_out_of_range_error_if(C)
Throws std::out_of_range if condition holds.
Definition ah-errors.H:579
#define ah_domain_error_if(C)
Throws std::domain_error if condition holds.
Definition ah-errors.H:522
#define ah_invalid_argument_if(C)
Throws std::invalid_argument if condition holds.
Definition ah-errors.H:639
Standard functor implementations and comparison objects.
List_Graph< Graph_Node< Node_Info >, Graph_Arc< Arc_Info > > GT
Simple dynamic array with automatic resizing and functional operations.
Definition tpl_array.H:139
static Array create(size_t n)
Create an array with n logical elements.
Definition tpl_array.H:194
constexpr size_t size() const noexcept
Return the number of elements stored in the stack.
Definition tpl_array.H:351
T & append(const T &data)
Append a copy of data
Definition tpl_array.H:245
void reserve(size_t cap)
Reserves cap cells into the array.
Definition tpl_array.H:315
Dynamic stack of elements of generic type T based on a singly linked list.
T & top()
Return a modifiable reference to the top item of the stack.
bool is_empty() const noexcept
Check if the stack is empty.
T pop()
Remove and return the top item of the stack.
T & push(const T &data)
Push an item by copy onto the top of the stack.
Generic key-value map implemented on top of a binary search tree.
typename Base::Iterator Iterator
void next_ne() noexcept
Advances the iterator to the next filtered element (noexcept version).
Centroid decomposition over a rooted tree represented as an Aleph graph.
Node * centroid_parent(const Node *node) const
Parent centroid of node (or nullptr if centroid root).
size_t centroid_path_length_of(const Node *node) const
Number of centroid ancestors stored for node.
Array< Array< size_t > > centroid_distances_
size_t centroid_level_of_id(const size_t id) const
Level of centroid id in centroid tree (root level = 0).
const Array< size_t > & centroid_ancestors_of(const Node *node) const
Centroid ancestors of node from centroid root down to local centroid.
Node * centroid_root() const noexcept
Root centroid node of the centroid tree (or nullptr if empty).
bool is_centroid_ancestor_id(const size_t ancestor, const size_t node) const
True iff ancestor is in the centroid-ancestor chain of node.
void ensure_not_empty(const char *where) const
size_t centroid_level_of(const Node *node) const
Level of centroid node in centroid tree (root level = 0).
size_t centroid_root_id() const noexcept
Root centroid ID of the centroid tree (or NONE if empty).
Gen_Centroid_Decomposition(const GT &g, Node *root, SA sa=SA())
Construct using an explicit root node.
const Array< size_t > & centroid_ancestors_of_id(const size_t id) const
Centroid ancestors of node ID from centroid root down to local centroid.
Node * node_of(const size_t id) const
size_t centroid_parent_id(const size_t id) const
Parent centroid ID of id (or NONE if centroid root).
size_t centroid_path_length_of_id(const size_t id) const
Number of centroid ancestors stored for node ID.
const Array< size_t > & centroid_distances_of(const Node *node) const
Distances to centroid ancestors (aligned with centroid_ancestors_of).
size_t max_centroid_level() const noexcept
Maximum centroid-tree level.
size_t distance_to_centroid_id(const size_t node, const size_t centroid) const
Distance from node ID to a centroid ancestor ID.
void append_centroid_annotations(const size_t centroid)
void for_each_centroid_ancestor_id(const size_t node, F &&f) const
Visit each centroid ancestor of node ID.
size_t id_of(const Node *node) const
Array< size_t > collect_component_nodes(const size_t start)
void for_each_centroid_ancestor(const Node *node, F &&f) const
Visit each centroid ancestor of node.
const Array< size_t > & centroid_distances_of_id(const size_t id) const
Distances to centroid ancestors (aligned with centroid_ancestors_of_id).
Gen_Centroid_Decomposition(const GT &g, SA sa=SA())
Construct using the first graph node as root.
size_t choose_centroid(const Array< size_t > &nodes)
Array< Array< size_t > > centroid_ancestors_
Segment-tree-powered path/subtree queries over an HLD layout.
Gen_Heavy_Light_Decomposition< GT, SA > hld_
T query_subtree(const Node *node) const
Subtree query for node in O(log n).
static Array< T > build_base_from_node_info(const Gen_Heavy_Light_Decomposition< GT, SA > &hld)
void set_node(const Node *node, const T &value)
Point assignment: a[node] = value.
T query_path(const Node *u, const Node *v) const
Path query between two nodes in O(log^2 n).
void update_node(const Node *node, const T &delta)
Point update: a[node] = Op(a[node], delta).
Gen_HLD_Path_Query(const GT &g, Node *root, const T &identity, Op op=Op(), SA sa=SA())
Construct from graph/root using node->get_info() as initial values.
void ensure_not_empty(const char *where) const
const Gen_Heavy_Light_Decomposition< GT, SA > & decomposition() const noexcept
T query_path_id(const size_t u, const size_t v) const
Path query between two node IDs in O(log^2 n).
Gen_HLD_Path_Query(const GT &g, const T &identity, Op op=Op(), SA sa=SA())
Construct from graph (first node as root) using node->get_info().
T get_node_id(const size_t id) const
Return current value at node ID.
size_t size() const noexcept
T query_subtree_id(const size_t id) const
Subtree query for node ID in O(log n).
void update_node_id(const size_t id, const T &delta)
Point update: a[id] = Op(a[id], delta).
bool is_empty() const noexcept
void set_node_id(const size_t id, const T &value)
Point assignment: a[id] = value.
Gen_Segment_Tree< T, Op > segment_tree_
requires std::convertible_to< std::invoke_result_t< Getter, Node * >, T >: static Array< T > build_base_array(const Gen_Heavy_Light_Decomposition< GT, SA > &hld, Getter getter)
requires std::convertible_to< std::invoke_result_t< Getter, Node * >, T >: Gen_HLD_Path_Query(const GT &g, Node *root, Getter getter, const T &identity, Op op=Op(), SA sa=SA())
Construct with custom node-to-value getter.
requires std::convertible_to< std::invoke_result_t< Getter, Node * >, T >: Gen_HLD_Path_Query(const GT &g, Getter getter, const T &identity, Op op=Op(), SA sa=SA())
Construct with custom getter and implicit root (first node).
T get_node(const Node *node) const
Return current value at node.
Heavy-Light Decomposition over a rooted tree represented as an Aleph graph.
Gen_Heavy_Light_Decomposition(const GT &g, SA sa=SA())
Construct using the first graph node as root.
size_t position_of(const Node *node) const
void for_each_path_segment_id(size_t u, size_t v, F &&visit) const
Enumerate path segments from u to v in path order.
size_t parent_id(const size_t id) const
size_t head_id(const size_t id) const
size_t distance_id(const size_t u, const size_t v) const
Distance (number of edges) between two node IDs.
size_t lca_id(size_t u, size_t v) const
Lowest common ancestor in O(log n).
size_t position_of_id(const size_t id) const
Node * head_of(const Node *node) const
size_t distance(const Node *u, const Node *v) const
Distance (number of edges) between two nodes.
size_t subtree_size_of_id(const size_t id) const
size_t depth_of_id(const size_t id) const
size_t heavy_child_id(const size_t id) const
void ensure_not_empty(const char *where) const
Node * lca(const Node *u, const Node *v) const
Lowest common ancestor in O(log n).
Node * heavy_child(const Node *node) const
bool is_ancestor(const Node *u, const Node *v) const
True iff u is ancestor of v in the rooted tree.
Range subtree_range(const Node *node) const
Base-array range of a full subtree (inclusive endpoints).
size_t id_of(const Node *node) const
size_t depth_of(const Node *node) const
Gen_Heavy_Light_Decomposition(const GT &g, Node *root, SA sa=SA())
Construct using an explicit root node.
Node * node_of(const size_t id) const
size_t subtree_size_of(const Node *node) const
Range subtree_range_id(const size_t id) const
Base-array range of a full subtree (inclusive endpoints).
Node * parent_of(const Node *node) const
bool is_ancestor_id(const size_t u, const size_t v) const
True iff u is ancestor of v in the rooted tree.
void for_each_path_segment(const Node *u, const Node *v, F &&visit) const
Enumerate path segments from u to v in path order.
void for_each_path_segment_undirected_id(const size_t u, const size_t v, F &&visit) const
Enumerate path segments ignoring direction.
Segment tree over an arbitrary associative binary operation.
typename Node::Node_Type Node_Type
The arc class type.
Definition tpl_graph.H:436
Filtered iterator on the nodes of a graph.
Definition tpl_graph.H:1206
void validate_id(const size_t id, const char *where) const
const Array< Node * > & id_to_node() const noexcept
Rooted_Tree_Topology(const GT &g, Node *root, SA sa=SA())
const Array< Array< size_t > > & adjacency() const noexcept
void check_id(const size_t id, const char *where) const
static Pair_Key normalize_pair(size_t u, size_t v) noexcept
Node * get_src_node(Arc *arc) const noexcept
Return the source node of arc (only for directed graphs)
Definition graph-dry.H:737
constexpr size_t get_num_nodes() const noexcept
Return the total of nodes of graph.
Definition graph-dry.H:695
Node * get_tgt_node(Arc *arc) const noexcept
Return the target node of arc (only for directed graphs)
Definition graph-dry.H:743
__gmp_expr< T, __gmp_binary_expr< __gmp_expr< T, U >, unsigned long int, __gmp_root_function > > root(const __gmp_expr< T, U > &expr, unsigned long int l)
Definition gmpfrxx.h:4060
DynArray< Graph::Node * > nodes
Definition graphpic.C:406
Main namespace for Aleph-w library functions.
Definition ah-arena.H:89
Check uniqueness with explicit hash and equality functors.
size_t size(Node *root) noexcept
Divide_Conquer_DP_Result< Cost > divide_and_conquer_partition_dp(const size_t groups, const size_t n, Transition_Cost_Fn transition_cost, const Cost inf=dp_optimization_detail::default_inf< Cost >())
Optimize partition DP using divide-and-conquer optimization.
std::decay_t< typename HeadC::Item_Type > T
Definition ah-zip.H:105
Container< T > range(const T start, const T end, const T step=1)
Generate a range of values [start, end] with a given step.
STL namespace.
Filtered iterator on all the arcs of a graph.
Definition tpl_graph.H:1164
Default filter for filtered iterators on arcs.
Definition tpl_graph.H:1000
Open addressing hash map using linear probing.
Data & find(const Key &key)
Find and return the value for a key.
Pair * insert(const Key &key, const Data &data)
Insert a key-value pair (copy semantics).
Pair * search(const Key &key) const noexcept
Search for a key in the map.
Task with priority for job scheduling.
gsl_rng * r
Dynamic array container with automatic resizing.
Dynamic stack implementation based on linked lists.
Dynamic map with open hashing.
Dynamic key-value map based on balanced binary search trees.
Generic graph and digraph implementations.
Segment trees for dynamic range queries with lazy propagation.
DynList< int > l