//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the SmallPtrSet class.  See SmallPtrSet.h for an
// overview of the algorithm.
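//
// A minimal usage sketch (illustrative only; the element type, 'Ptr', and
// 'visit' are assumed for the example and are not part of this file):
//
//   llvm::SmallPtrSet<void *, 8> Visited;  // 8 inline slots before heap use
//   if (Visited.insert(Ptr).second)        // .second is true on first insert
//     visit(Ptr);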
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemAlloc.h"
#include <algorithm>
#include <cassert>
#include <cstdlib>

using namespace llvm;

void SmallPtrSetImplBase::shrink_and_clear() {
  assert(!isSmall() && "Can't shrink a small set!");
  free(CurArray);

  // Reduce the number of buckets.
  unsigned Size = size();
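  // Pick a power-of-two bucket count that keeps the table at most half full,
  // but never go below the 32-bucket minimum.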
  CurArraySize = Size > 16 ? 1 << (Log2_32_Ceil(Size) + 1) : 32;
  NumNonEmpty = NumTombstones = 0;

  // Install the new array.  Clear all the buckets to empty.
  CurArray = (const void**)safe_malloc(sizeof(void*) * CurArraySize);

  memset(CurArray, -1, CurArraySize*sizeof(void*));
}

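// Out-of-line slow path for insert(): handles the heap-allocated ("big")
// representation.  Returns the bucket for Ptr together with a flag that is
// true if a new element was actually inserted.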
std::pair<const void *const *, bool>
SmallPtrSetImplBase::insert_imp_big(const void *Ptr) {
  if (LLVM_UNLIKELY(size() * 4 >= CurArraySize * 3)) {
    // If more than 3/4 of the array is full, grow.
    Grow(CurArraySize < 64 ? 128 : CurArraySize * 2);
  } else if (LLVM_UNLIKELY(CurArraySize - NumNonEmpty < CurArraySize / 8)) {
    // If fewer than 1/8 of the buckets are empty (meaning that many are filled
    // with tombstones), rehash at the current size to clear them out.
    Grow(CurArraySize);
  }

  // Okay, we know we have space.  Find a hash bucket.
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
  if (*Bucket == Ptr)
    return std::make_pair(Bucket, false); // Already inserted, good.

  // Otherwise, insert it!
  if (*Bucket == getTombstoneMarker())
    --NumTombstones;
  else
    ++NumNonEmpty; // Track density.
  *Bucket = Ptr;
  incrementEpoch();
  return std::make_pair(Bucket, true);
}

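// Return the bucket holding Ptr if it is present; otherwise return the bucket
// where it should be inserted (the first tombstone seen, or failing that, the
// first empty bucket).  Probing advances by 1, 2, 3, ... slots, which visits
// every bucket of a power-of-two table.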
const void * const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
  unsigned ArraySize = CurArraySize;
  unsigned ProbeAmt = 1;
  const void *const *Array = CurArray;
  const void *const *Tombstone = nullptr;
  while (true) {
    // If we found an empty bucket, the pointer doesn't exist in the set.
    // Return a tombstone if we've seen one so far, or the empty bucket if
    // not.
    if (LLVM_LIKELY(Array[Bucket] == getEmptyMarker()))
      return Tombstone ? Tombstone : Array+Bucket;

    // Found Ptr's bucket?
    if (LLVM_LIKELY(Array[Bucket] == Ptr))
      return Array+Bucket;

    // If this is a tombstone, remember it.  If Ptr ends up not in the set, we
    // prefer to return it rather than a bucket that would require more probing.
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
      Tombstone = Array+Bucket;  // Remember the first tombstone found.

    // It's a hash collision or a tombstone. Reprobe.
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
  }
}

/// Grow - Allocate a larger backing store for the buckets and move the
/// existing elements over.
void SmallPtrSetImplBase::Grow(unsigned NewSize) {
  const void **OldBuckets = CurArray;
  const void **OldEnd = EndPointer();
  bool WasSmall = isSmall();

  // Install the new array.  Clear all the buckets to empty.
  const void **NewBuckets = (const void**) safe_malloc(sizeof(void*) * NewSize);

  // Reset members only after memory has been allocated successfully.
  CurArray = NewBuckets;
  CurArraySize = NewSize;
  memset(CurArray, -1, NewSize*sizeof(void*));

  // Copy over all valid entries.
  for (const void **BucketPtr = OldBuckets; BucketPtr != OldEnd; ++BucketPtr) {
    // Copy over the element if it is valid.
    const void *Elt = *BucketPtr;
    if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
  }

  if (!WasSmall)
    free(OldBuckets);
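  // Rehashing dropped every tombstone, so only live elements remain counted
  // as non-empty.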
  NumNonEmpty -= NumTombstones;
  NumTombstones = 0;
}

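// Copy-construct from 'that', using the caller-provided small storage when
// 'that' is small and otherwise allocating a heap array of matching size.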
SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         const SmallPtrSetImplBase &that) {
  SmallArray = SmallStorage;

  // If we're becoming small, prepare to insert into our stack space
  if (that.isSmall()) {
    CurArray = SmallArray;
  // Otherwise, allocate new heap space to hold the copy.
  } else {
    CurArray = (const void**)safe_malloc(sizeof(void*) * that.CurArraySize);
  }

  // Copy over the contents of that array.
  CopyHelper(that);
}

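// Move-construct from 'that': the heavy lifting (stealing the heap allocation
// or copying the small elements) is done by MoveHelper.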
SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         unsigned SmallSize,
                                         SmallPtrSetImplBase &&that) {
  SmallArray = SmallStorage;
  MoveHelper(SmallSize, std::move(that));
}

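// Copy-assign from RHS, reusing our current allocation where possible and
// resizing it when the bucket counts differ.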
void SmallPtrSetImplBase::CopyFrom(const SmallPtrSetImplBase &RHS) {
  assert(&RHS != this && "Self-copy should be handled by the caller.");

  if (isSmall() && RHS.isSmall())
    assert(CurArraySize == RHS.CurArraySize &&
           "Cannot assign sets with different small sizes");

  // If we're becoming small, prepare to insert into our stack space
  if (RHS.isSmall()) {
    if (!isSmall())
      free(CurArray);
    CurArray = SmallArray;
  // Otherwise, allocate new heap space (unless we were the same size)
  } else if (CurArraySize != RHS.CurArraySize) {
    if (isSmall())
      CurArray = (const void**)safe_malloc(sizeof(void*) * RHS.CurArraySize);
    else {
      const void **T = (const void**)safe_realloc(CurArray,
                                             sizeof(void*) * RHS.CurArraySize);
      CurArray = T;
    }
  }

  CopyHelper(RHS);
}

void SmallPtrSetImplBase::CopyHelper(const SmallPtrSetImplBase &RHS) {
  // Copy over the new array size
  CurArraySize = RHS.CurArraySize;

  // Copy over the contents from the other set
  std::copy(RHS.CurArray, RHS.EndPointer(), CurArray);

  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;
}

void SmallPtrSetImplBase::MoveFrom(unsigned SmallSize,
                                   SmallPtrSetImplBase &&RHS) {
  if (!isSmall())
    free(CurArray);
  MoveHelper(SmallSize, std::move(RHS));
}

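// Shared implementation of move construction and move assignment.  Assumes
// any previous heap allocation owned by *this has already been released by
// the caller.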
void SmallPtrSetImplBase::MoveHelper(unsigned SmallSize,
                                     SmallPtrSetImplBase &&RHS) {
  assert(&RHS != this && "Self-move should be handled by the caller.");

  if (RHS.isSmall()) {
    // Copy a small RHS rather than moving.
    CurArray = SmallArray;
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, CurArray);
  } else {
    CurArray = RHS.CurArray;
    RHS.CurArray = RHS.SmallArray;
  }

  // Copy the rest of the trivial members.
  CurArraySize = RHS.CurArraySize;
  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;

  // Make the RHS small and empty.
  RHS.CurArraySize = SmallSize;
  assert(RHS.CurArray == RHS.SmallArray);
  RHS.NumNonEmpty = 0;
  RHS.NumTombstones = 0;
}

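// Swap the contents of two sets.  When both sets are heap-allocated this is a
// straight pointer swap; if either side is small the elements have to be
// copied through the inline buffers (see the FIXME below about assuming equal
// small sizes).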
void SmallPtrSetImplBase::swap(SmallPtrSetImplBase &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither set is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->CurArray, RHS.CurArray);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // FIXME: From here on we assume that both sets have the same small size.

  // If only RHS is small, copy the small elements into LHS and move the pointer
  // from LHS to RHS.
  if (!this->isSmall() && RHS.isSmall()) {
    assert(RHS.CurArray == RHS.SmallArray);
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, this->SmallArray);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    RHS.CurArray = this->CurArray;
    this->CurArray = this->SmallArray;
    return;
  }

  // If only LHS is small, copy the small elements into RHS and move the pointer
  // from RHS to LHS.
  if (this->isSmall() && !RHS.isSmall()) {
    assert(this->CurArray == this->SmallArray);
    std::copy(this->CurArray, this->CurArray + this->NumNonEmpty,
              RHS.SmallArray);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(RHS.NumNonEmpty, this->NumNonEmpty);
    std::swap(RHS.NumTombstones, this->NumTombstones);
    this->CurArray = RHS.CurArray;
    RHS.CurArray = RHS.SmallArray;
    return;
  }

  // Both are small, so just swap the small elements.
  assert(this->isSmall() && RHS.isSmall());
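  // Swap the first MinNonEmpty elements of the two inline buffers, then copy
  // the leftover tail of the larger set into the smaller one.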
  unsigned MinNonEmpty = std::min(this->NumNonEmpty, RHS.NumNonEmpty);
  std::swap_ranges(this->SmallArray, this->SmallArray + MinNonEmpty,
                   RHS.SmallArray);
  if (this->NumNonEmpty > MinNonEmpty) {
    std::copy(this->SmallArray + MinNonEmpty,
              this->SmallArray + this->NumNonEmpty,
              RHS.SmallArray + MinNonEmpty);
  } else {
    std::copy(RHS.SmallArray + MinNonEmpty, RHS.SmallArray + RHS.NumNonEmpty,
              this->SmallArray + MinNonEmpty);
  }
  assert(this->CurArraySize == RHS.CurArraySize);
  std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
  std::swap(this->NumTombstones, RHS.NumTombstones);
}
272