//===-- RangeMap.h ----------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLDB_UTILITY_RANGEMAP_H
#define LLDB_UTILITY_RANGEMAP_H

#include <algorithm>
#include <vector>

#include "llvm/ADT/SmallVector.h"

#include "lldb/lldb-private.h"

// Uncomment to make sure all Range objects are sorted when needed
//#define ASSERT_RANGEMAP_ARE_SORTED

namespace lldb_private {

// Templatized classes for dealing with generic ranges and also collections of
// ranges, or collections of ranges that have associated data.

// A simple range class where you get to define the type of the range
// base "B", and the type used for the range byte size "S".
template <typename B, typename S> struct Range {
  typedef B BaseType;
  typedef S SizeType;

  BaseType base;
  SizeType size;

  Range() : base(0), size(0) {}

  Range(BaseType b, SizeType s) : base(b), size(s) {}

  void Clear(BaseType b = 0) {
    base = b;
    size = 0;
  }

  BaseType GetRangeBase() const { return base; }

  /// Set the start value for the range, and keep the same size
  void SetRangeBase(BaseType b) { base = b; }

  void Slide(BaseType slide) { base += slide; }

  void ShrinkFront(S s) {
    base += s;
    size -= std::min(s, size);
  }

  bool Union(const Range &rhs) {
    if (DoesAdjoinOrIntersect(rhs)) {
      auto new_end = std::max<BaseType>(GetRangeEnd(), rhs.GetRangeEnd());
      base = std::min<BaseType>(base, rhs.base);
      size = new_end - base;
      return true;
    }
    return false;
  }

  Range Intersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    Range range;
    range.SetRangeBase(std::max(lhs_base, rhs_base));
    range.SetRangeEnd(std::min(lhs_end, rhs_end));
    return range;
  }

  BaseType GetRangeEnd() const { return base + size; }

  void SetRangeEnd(BaseType end) {
    if (end > base)
      size = end - base;
    else
      size = 0;
  }

  SizeType GetByteSize() const { return size; }

  void SetByteSize(SizeType s) { size = s; }

  bool IsValid() const { return size > 0; }

  bool Contains(BaseType r) const {
    return (GetRangeBase() <= r) && (r < GetRangeEnd());
  }

  bool ContainsEndInclusive(BaseType r) const {
    return (GetRangeBase() <= r) && (r <= GetRangeEnd());
  }

  bool Contains(const Range &range) const {
    return Contains(range.GetRangeBase()) &&
           ContainsEndInclusive(range.GetRangeEnd());
  }

  // Returns true if the two ranges adjoin or intersect
  bool DoesAdjoinOrIntersect(const Range &rhs) const {
    const BaseType lhs_base = this->GetRangeBase();
    const BaseType rhs_base = rhs.GetRangeBase();
    const BaseType lhs_end = this->GetRangeEnd();
    const BaseType rhs_end = rhs.GetRangeEnd();
    bool result = (lhs_base <= rhs_end) && (lhs_end >= rhs_base);
    return result;
  }

  // Returns true if the two ranges intersect
  bool DoesIntersect(const Range &rhs) const {
    return Intersect(rhs).IsValid();
  }

  bool operator<(const Range &rhs) const {
    if (base == rhs.base)
      return size < rhs.size;
    return base < rhs.base;
  }

  bool operator==(const Range &rhs) const {
    return base == rhs.base && size == rhs.size;
  }

  bool operator!=(const Range &rhs) const {
    return base != rhs.base || size != rhs.size;
  }
};
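
// A minimal usage sketch for Range (illustrative only; the lldb::addr_t
// instantiation and the literal values are arbitrary choices):
//
//   Range<lldb::addr_t, lldb::addr_t> a(0x1000, 0x100); // [0x1000, 0x1100)
//   Range<lldb::addr_t, lldb::addr_t> b(0x1100, 0x100); // [0x1100, 0x1200)
//   a.DoesAdjoinOrIntersect(b); // true: a ends exactly where b begins
//   a.DoesIntersect(b);         // false: half-open ranges share no address
//   a.Union(b);                 // a becomes [0x1000, 0x1200)
//   a.Contains(0x11ff);         // true after the union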

template <typename B, typename S, unsigned N = 0> class RangeVector {
public:
  typedef B BaseType;
  typedef S SizeType;
  typedef Range<B, S> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  RangeVector() = default;

  ~RangeVector() = default;

  static RangeVector GetOverlaps(const RangeVector &vec1,
                                 const RangeVector &vec2) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(vec1.IsSorted() && vec2.IsSorted());
#endif
    RangeVector result;
    auto pos1 = vec1.begin();
    auto end1 = vec1.end();
    auto pos2 = vec2.begin();
    auto end2 = vec2.end();
    while (pos1 != end1 && pos2 != end2) {
      Entry entry = pos1->Intersect(*pos2);
      if (entry.IsValid())
        result.Append(entry);
      if (pos1->GetRangeEnd() < pos2->GetRangeEnd())
        ++pos1;
      else
        ++pos2;
    }
    return result;
  }

  bool operator==(const RangeVector &rhs) const {
    if (GetSize() != rhs.GetSize())
      return false;
    for (size_t i = 0; i < GetSize(); ++i) {
      if (GetEntryRef(i) != rhs.GetEntryRef(i))
        return false;
    }
    return true;
  }

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Append(B base, S size) { m_entries.emplace_back(base, size); }

  // Insert an item into a sorted list, combining it with any adjacent
  // entries if requested.
  void Insert(const Entry &entry, bool combine) {
    if (m_entries.empty()) {
      m_entries.push_back(entry);
      return;
    }
    auto begin = m_entries.begin();
    auto end = m_entries.end();
    auto pos = std::lower_bound(begin, end, entry);
    if (combine) {
      if (pos != end && pos->Union(entry)) {
        CombinePrevAndNext(pos);
        return;
      }
      if (pos != begin) {
        auto prev = pos - 1;
        if (prev->Union(entry)) {
          CombinePrevAndNext(prev);
          return;
        }
      }
    }
    m_entries.insert(pos, entry);
  }

  bool RemoveEntryAtIndex(uint32_t idx) {
    if (idx < m_entries.size()) {
      m_entries.erase(m_entries.begin() + idx);
      return true;
    }
    return false;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Verify that each entry is not less than its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveRanges() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    auto first_intersect = std::adjacent_find(
        m_entries.begin(), m_entries.end(), [](const Entry &a, const Entry &b) {
          return a.DoesAdjoinOrIntersect(b);
        });
    if (first_intersect == m_entries.end())
      return;

    // We can combine at least one entry, so make a new collection, populate
    // it accordingly, and swap it into place.
    auto pos = std::next(first_intersect);
    Collection minimal_ranges(m_entries.begin(), pos);
    for (; pos != m_entries.end(); ++pos) {
      Entry &back = minimal_ranges.back();
      if (back.DoesAdjoinOrIntersect(*pos))
        back.SetRangeEnd(std::max(back.GetRangeEnd(), pos->GetRangeEnd()));
      else
        minimal_ranges.push_back(*pos);
    }
    m_entries.swap(minimal_ranges);
  }

  BaseType GetMinRangeBase(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the first
    // range's base
    return m_entries.front().GetRangeBase();
  }

  BaseType GetMaxRangeEnd(BaseType fail_value) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (m_entries.empty())
      return fail_value;
    // m_entries must be sorted, so if we aren't empty, we grab the last
    // range's end
    return m_entries.back().GetRangeEnd();
  }

  void Slide(BaseType slide) {
    typename Collection::iterator pos, end;
    for (pos = m_entries.begin(), end = m_entries.end(); pos != end; ++pos)
      pos->Slide(slide);
  }

  void Clear() { m_entries.clear(); }

  void Reserve(typename Collection::size_type size) { m_entries.reserve(size); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return std::distance(begin, pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr))
          return std::distance(begin, pos);
      }
    }
    return UINT32_MAX;
  }

  const Entry *FindEntryThatContains(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry(addr, 1);
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, entry, BaseLessThan);

      if (pos != end && pos->Contains(addr)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(addr)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      if (pos != end && pos->Contains(range)) {
        return &(*pos);
      } else if (pos != begin) {
        --pos;
        if (pos->Contains(range)) {
          return &(*pos);
        }
      }
    }
    return nullptr;
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  void CombinePrevAndNext(typename Collection::iterator pos) {
    // Check the previous and next entries in case they need to be unioned
    // with the entry pointed to by "pos".
    if (pos != m_entries.begin()) {
      auto prev = pos - 1;
      if (prev->Union(*pos))
        m_entries.erase(pos);
      pos = prev;
    }

    auto end = m_entries.end();
    if (pos != end) {
      auto next = pos + 1;
      if (next != end) {
        if (pos->Union(*next))
          m_entries.erase(next);
      }
    }
  }

  Collection m_entries;
};
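
// A minimal usage sketch for RangeVector (illustrative only; the lldb::addr_t
// instantiation and the literal values are arbitrary choices):
//
//   RangeVector<lldb::addr_t, lldb::addr_t> ranges;
//   ranges.Append(0x2000, 0x100);
//   ranges.Append(0x1000, 0x100);
//   ranges.Sort();                        // required before the Find* queries
//   ranges.Insert({0x1100, 0x100}, true); // unions with [0x1000, 0x1100)
//   const auto *entry = ranges.FindEntryThatContains(0x1180);
//   // entry points at the combined range [0x1000, 0x1200)
//
//   RangeVector<lldb::addr_t, lldb::addr_t> other;
//   other.Append(0x1100, 0x80);
//   other.Sort();
//   auto overlaps = RangeVector<lldb::addr_t, lldb::addr_t>::GetOverlaps(
//       ranges, other); // holds the single range [0x1100, 0x1180)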

// A simple range-with-data class where you get to define the type of the
// range base "B", the type used for the range byte size "S", and the type
// for the associated data "T".
template <typename B, typename S, typename T>
struct RangeData : public Range<B, S> {
  typedef T DataType;

  DataType data;

  RangeData() : Range<B, S>(), data() {}

  RangeData(B base, S size) : Range<B, S>(base, size), data() {}

  RangeData(B base, S size, DataType d) : Range<B, S>(base, size), data(d) {}
};

// We can treat the vector as a flattened binary search tree: augmenting it
// with an upper bound (the maximum range endpoint within each subtree) lets
// us answer range-containment queries more quickly. See the illustration
// after AugmentedRangeData below.
template <typename B, typename S, typename T>
struct AugmentedRangeData : public RangeData<B, S, T> {
  B upper_bound;

  AugmentedRangeData(const RangeData<B, S, T> &rd)
      : RangeData<B, S, T>(rd), upper_bound() {}
};
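
// An illustrative sketch of the augmented layout (hypothetical values, not
// part of the API). For sorted entries [0x10,0x20), [0x30,0x80) and
// [0x50,0x60) stored at indexes 0..2, the implicit tree is rooted at the
// middle index:
//
//                 index 1: [0x30,0x80) upper_bound=0x80
//                /                                    \
//   index 0: [0x10,0x20)                   index 2: [0x50,0x60)
//   upper_bound=0x20                       upper_bound=0x60
//
// Each upper_bound is the maximum range end within its subtree, so a lookup
// can prune any subtree whose upper_bound lies below the queried address.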

template <typename B, typename S, typename T, unsigned N = 0,
          class Compare = std::less<T>>
class RangeDataVector {
public:
  typedef lldb_private::Range<B, S> Range;
  typedef RangeData<B, S, T> Entry;
  typedef AugmentedRangeData<B, S, T> AugmentedEntry;
  typedef llvm::SmallVector<AugmentedEntry, N> Collection;

  RangeDataVector(Compare compare = Compare()) : m_compare(compare) {}

  ~RangeDataVector() = default;

  void Append(const Entry &entry) { m_entries.emplace_back(entry); }

  bool Erase(uint32_t start, uint32_t end) {
    if (start >= end || end > m_entries.size())
      return false;
    m_entries.erase(begin() + start, begin() + end);
    return true;
  }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end(),
                       [&compare = m_compare](const Entry &a, const Entry &b) {
                         if (a.base != b.base)
                           return a.base < b.base;
                         if (a.size != b.size)
                           return a.size < b.size;
                         return compare(a.data, b.data);
                       });
    if (!m_entries.empty())
      ComputeUpperBounds(0, m_entries.size());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void CombineConsecutiveEntriesWithEqualData() {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    typename Collection::iterator pos;
    typename Collection::iterator end;
    typename Collection::iterator prev;
    bool can_combine = false;
    // First we determine if we can combine any of the Entry objects so we
    // don't end up allocating and making a new collection for no reason
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && prev->data == pos->data) {
        can_combine = true;
        break;
      }
    }

    // If we can combine at least one entry, make a new collection, populate
    // it accordingly, and swap it into place.
    if (can_combine) {
      Collection minimal_ranges;
      for (pos = m_entries.begin(), end = m_entries.end(), prev = end;
           pos != end; prev = pos++) {
        if (prev != end && prev->data == pos->data)
          minimal_ranges.back().SetRangeEnd(pos->GetRangeEnd());
        else
          minimal_ranges.push_back(*pos);
      }
      // Use the swap technique in case our new vector is much smaller. We must
      // swap when using the STL because std::vector objects never release or
      // reduce the memory once it has been allocated/reserved.
      m_entries.swap(minimal_ranges);
    }
  }

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  Entry *GetMutableEntryAtIndex(size_t i) {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  Entry &GetEntryRef(size_t i) { return m_entries[i]; }
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.GetRangeBase() < rhs.GetRangeBase();
  }

  uint32_t FindEntryIndexThatContains(B addr) const {
    const AugmentedEntry *entry =
        static_cast<const AugmentedEntry *>(FindEntryThatContains(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  uint32_t FindEntryIndexesThatContain(B addr, std::vector<uint32_t> &indexes) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty())
      FindEntryIndexesThatContain(addr, 0, m_entries.size(), indexes);

    return indexes.size();
  }

  Entry *FindEntryThatContains(B addr) {
    return const_cast<Entry *>(
        static_cast<const RangeDataVector *>(this)->FindEntryThatContains(
            addr));
  }

  const Entry *FindEntryThatContains(B addr) const {
    return FindEntryThatContains(Entry(addr, 1));
  }

  const Entry *FindEntryThatContains(const Entry &range) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos =
          std::lower_bound(begin, end, range, BaseLessThan);

      while (pos != begin && pos[-1].Contains(range))
        --pos;

      if (pos != end && pos->Contains(range))
        return &(*pos);
    }
    return nullptr;
  }

  const Entry *FindEntryStartsAt(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      auto begin = m_entries.begin(), end = m_entries.end();
      auto pos = std::lower_bound(begin, end, Entry(addr, 1), BaseLessThan);
      if (pos != end && pos->base == addr)
        return &(*pos);
    }
    return nullptr;
  }

  // This method will return the entry that contains the given address, or the
  // entry following that address.  If you give it an address of 0 and the
  // first entry starts at address 0x100, you will get the entry at 0x100.
  //
  // For most uses, FindEntryThatContains is the correct one to use; this is a
  // less commonly needed behavior.  It was added for core file memory regions,
  // where we want to present a gap in the memory regions as a distinct region,
  // so we need to know the start address of the next memory section that
  // exists.
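  //
  // For example (an illustrative sketch; the ranges are hypothetical): given
  // sorted entries [0x100, 0x200) and [0x400, 0x500),
  //   FindEntryThatContainsOrFollows(0x150) returns the [0x100, 0x200) entry
  //   FindEntryThatContainsOrFollows(0x300) returns the [0x400, 0x500) entry
  //   FindEntryThatContainsOrFollows(0x600) returns nullptr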
  const Entry *FindEntryThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      typename Collection::const_iterator begin = m_entries.begin();
      typename Collection::const_iterator end = m_entries.end();
      typename Collection::const_iterator pos = llvm::lower_bound(
          m_entries, addr, [](const Entry &lhs, B rhs_base) -> bool {
            return lhs.GetRangeEnd() <= rhs_base;
          });

      while (pos != begin && pos[-1].Contains(addr))
        --pos;

      if (pos != end)
        return &(*pos);
    }
    return nullptr;
  }

  uint32_t FindEntryIndexThatContainsOrFollows(B addr) const {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    const AugmentedEntry *entry = static_cast<const AugmentedEntry *>(
        FindEntryThatContainsOrFollows(addr));
    if (entry)
      return std::distance(m_entries.begin(), entry);
    return UINT32_MAX;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

  using const_iterator = typename Collection::const_iterator;
  const_iterator begin() const { return m_entries.begin(); }
  const_iterator end() const { return m_entries.end(); }

protected:
  Collection m_entries;
  Compare m_compare;

private:
  // Compute extra information needed for search
  B ComputeUpperBounds(size_t lo, size_t hi) {
    size_t mid = (lo + hi) / 2;
    AugmentedEntry &entry = m_entries[mid];

    entry.upper_bound = entry.base + entry.size;

    if (lo < mid)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(lo, mid));

    if (mid + 1 < hi)
      entry.upper_bound =
          std::max(entry.upper_bound, ComputeUpperBounds(mid + 1, hi));

    return entry.upper_bound;
  }

  // This is based on the augmented tree implementation found at
  // https://en.wikipedia.org/wiki/Interval_tree#Augmented_tree
  void FindEntryIndexesThatContain(B addr, size_t lo, size_t hi,
                                   std::vector<uint32_t> &indexes) {
    size_t mid = (lo + hi) / 2;
    const AugmentedEntry &entry = m_entries[mid];

    // addr is greater than the rightmost endpoint of any interval in this
    // subtree, so there cannot be any matches.
    if (addr > entry.upper_bound)
      return;

    // Recursively search left subtree
    if (lo < mid)
      FindEntryIndexesThatContain(addr, lo, mid, indexes);

    // If addr is smaller than the start of the current interval it
    // cannot contain it nor can any of its right subtree.
    if (addr < entry.base)
      return;

    if (entry.Contains(addr))
      indexes.push_back(mid); // Record the index of the containing entry.

    // Recursively search right subtree
    if (mid + 1 < hi)
      FindEntryIndexesThatContain(addr, mid + 1, hi, indexes);
  }
};
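
// A minimal usage sketch for RangeDataVector (illustrative only; the
// instantiation types, values, and the use() consumer are hypothetical):
//
//   RangeDataVector<lldb::addr_t, lldb::addr_t, uint32_t> map;
//   map.Append({0x1000, 0x100, 1});
//   map.Append({0x2000, 0x100, 2});
//   map.Sort(); // also computes the upper bounds used by the interval tree
//   if (const auto *entry = map.FindEntryThatContains(0x1010))
//     use(entry->data); // entry->data == 1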

// A simple address-with-data class where you get to define the type of the
// address "B" and the type for the associated data "T".
template <typename B, typename T> struct AddressData {
  typedef B BaseType;
  typedef T DataType;

  BaseType addr;
  DataType data;

  AddressData() : addr(), data() {}

  AddressData(B a, DataType d) : addr(a), data(d) {}

  bool operator<(const AddressData &rhs) const {
    if (this->addr == rhs.addr)
      return this->data < rhs.data;
    return this->addr < rhs.addr;
  }

  bool operator==(const AddressData &rhs) const {
    return this->addr == rhs.addr && this->data == rhs.data;
  }

  bool operator!=(const AddressData &rhs) const {
    return this->addr != rhs.addr || this->data != rhs.data;
  }
};

template <typename B, typename T, unsigned N> class AddressDataArray {
public:
  typedef AddressData<B, T> Entry;
  typedef llvm::SmallVector<Entry, N> Collection;

  AddressDataArray() = default;

  ~AddressDataArray() = default;

  void Append(const Entry &entry) { m_entries.push_back(entry); }

  void Sort() {
    if (m_entries.size() > 1)
      std::stable_sort(m_entries.begin(), m_entries.end());
  }

#ifdef ASSERT_RANGEMAP_ARE_SORTED
  bool IsSorted() const {
    typename Collection::const_iterator pos, end, prev;
    // Verify that each entry is not less than its predecessor.
    for (pos = m_entries.begin(), end = m_entries.end(), prev = end; pos != end;
         prev = pos++) {
      if (prev != end && *pos < *prev)
        return false;
    }
    return true;
  }
#endif

  void Clear() { m_entries.clear(); }

  bool IsEmpty() const { return m_entries.empty(); }

  size_t GetSize() const { return m_entries.size(); }

  const Entry *GetEntryAtIndex(size_t i) const {
    return ((i < m_entries.size()) ? &m_entries[i] : nullptr);
  }

  // Clients must ensure that "i" is a valid index prior to calling this
  // function
  const Entry &GetEntryRef(size_t i) const { return m_entries[i]; }

  static bool BaseLessThan(const Entry &lhs, const Entry &rhs) {
    return lhs.addr < rhs.addr;
  }

  Entry *FindEntry(B addr, bool exact_match_only) {
#ifdef ASSERT_RANGEMAP_ARE_SORTED
    assert(IsSorted());
#endif
    if (!m_entries.empty()) {
      Entry entry;
      entry.addr = addr;
      typename Collection::iterator begin = m_entries.begin();
      typename Collection::iterator end = m_entries.end();
      typename Collection::iterator pos =
          llvm::lower_bound(m_entries, entry, BaseLessThan);

      while (pos != begin && pos[-1].addr == addr)
        --pos;

      if (pos != end) {
        if (pos->addr == addr || !exact_match_only)
          return &(*pos);
      }
    }
    return nullptr;
  }

  const Entry *FindNextEntry(const Entry *entry) {
    if (entry >= &*m_entries.begin() && entry + 1 < &*m_entries.end())
      return entry + 1;
    return nullptr;
  }

  Entry *Back() { return (m_entries.empty() ? nullptr : &m_entries.back()); }

  const Entry *Back() const {
    return (m_entries.empty() ? nullptr : &m_entries.back());
  }

protected:
  Collection m_entries;
};
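
// A minimal usage sketch for AddressDataArray (illustrative only; the
// instantiation types and values are arbitrary choices):
//
//   AddressDataArray<lldb::addr_t, uint32_t, 8> array;
//   array.Append({0x2000, 2});
//   array.Append({0x1000, 1});
//   array.Sort(); // required before FindEntry
//   auto *exact = array.FindEntry(0x1000, true);        // finds {0x1000, 1}
//   auto *at_or_after = array.FindEntry(0x1800, false); // finds {0x2000, 2}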

} // namespace lldb_private

#endif // LLDB_UTILITY_RANGEMAP_H