//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  /// Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  void ShrinkFront(S s) {
    base += s;
    size -= std::min(s, size);
  }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  Range Intersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    Range range;
    range.SetRangeBase(std::max(lhs_base, rhs_base));
    range.SetRangeEnd(std::min(lhs_end, rhs_end));
    return range;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }
  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    return Intersect(rhs).IsValid();
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

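  // Return the ranges where "vec1" and "vec2" overlap. Both inputs must be
  // sorted. The implementation walks the two vectors in lockstep: each
  // overlapping pair of entries contributes its intersection to the result,
  // and whichever entry ends first is advanced.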
  static RangeVector GetOverlaps(const RangeVector &vec1,
                                 const RangeVector &vec2) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(vec1.IsSorted() && vec2.IsSorted());
#endif
    RangeVector result;
    auto pos1 = vec1.begin();
    auto end1 = vec1.end();
    auto pos2 = vec2.begin();
    auto end2 = vec2.end();
    while (pos1 != end1 && pos2 != end2) {
      Entry entry = pos1->Intersect(*pos2);
      if (entry.IsValid())
        result.Append(entry);
      if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
        ++pos1;
      else
        ++pos2;
    }
    return result;
  }

  bool operator==(const RangeVector &rhs) const {
    if (GetSize() != rhs.GetSize())
      return false;
    for (size_t i = 0; i < GetSize(); ++i) {
      if (GetEntryRef(i) != rhs.GetEntryRef(i))
        return false;
    }
    return true;
  }

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into a sorted list and optionally combine it with any
  // adjacent blocks if requested.
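  //
  // A brief sketch of the combine behavior (the values are illustrative only):
  //
  //   RangeVector<lldb::addr_t, uint32_t> ranges;
  //   ranges.Insert({0x1000, 0x100}, /*combine=*/true); // [0x1000, 0x1100)
  //   ranges.Insert({0x1100, 0x100}, /*combine=*/true); // unioned with the
  //                                                     // previous entry:
  //                                                     // [0x1000, 0x1200)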
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that every entry is greater than or equal to the entry that
    // precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

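  // Combine all ranges that adjoin or intersect into single entries, leaving
  // a minimal set of sorted, non-overlapping ranges.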
  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // We can combine at least one entry, so we make a new collection,
    // populate it accordingly, and then swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries to see if they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};

// A simple range-with-data class where you get to define the type of the
// range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the vector as a flattened binary search tree. Augmenting it
// with an upper bound (the maximum of the range endpoints) for every index
// allows us to query for range containment more quickly.
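//
// For example, given three sorted entries covering [0, 10), [5, 30), and
// [20, 25), the middle entry acts as the root and gets upper_bound = 30, the
// maximum range end in the whole tree, so a query for any address above 30
// is rejected at the root without visiting the children at indexes 0 and 2.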
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  /// Append a range with data to the vector.
  /// \param b The base of the memory range
  /// \param s The size of the memory range
  /// \param t The data associated with the memory range
  void Append(B &&b, S &&s, T &&t) { m_entries.emplace_back(Entry(b, s, t)); }

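  // Erase the entries in the half-open index interval [start, end). Returns
  // false if the interval is empty or extends past the end of the vector.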
  bool Erase(uint32_t start, uint32_t end) {
    if (start >= end || end > m_entries.size())
      return false;
    m_entries.erase(begin() + start, begin() + end);
    return true;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, we make a new collection,
    // populate it accordingly, and then swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior. It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct
  // region, so we need to know the start address of the next memory section
  // that exists.
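  //
  // For example, with entries covering [0x100, 0x200) and [0x400, 0x500), an
  // addr of 0x150 returns the first entry, while an addr of 0x300 falls in
  // the gap and returns the entry that starts at 0x400.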
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos = llvm::lower_bound(
          m_entries, addr, [](const Entry &lhs, B rhs_base) -> bool {
            return lhs.GetRangeEnd() <= rhs_base;
          });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  uint32_t FindEntryIndexThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    const AugmentedEntry *entry = static_cast<const AugmentedEntry *>(
        FindEntryThatContainsOrFollows(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute the upper bound (the maximum range end) for the subtree rooted at
  // the midpoint of [lo, hi); this is the extra information needed for the
  // interval search below.
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost point of any interval below mid,
    // so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval, the current
    // interval cannot contain it, nor can any interval in its right subtree.
    if (addr < entry.base)
      return;

    if (entry.Contains(addr))
      indexes.push_back(entry.data);

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};

// A simple address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Check that every entry is greater than or equal to the entry that
    // precedes it.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

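  // Find the entry whose address matches "addr". If "exact_match_only" is
  // false and there is no exact match, the first entry whose address is
  // greater than "addr" is returned instead.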
  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          llvm::lower_bound(m_entries, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }

  const Entry *FindNextEntry(const Entry *entry) {
    if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H