//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ALGORITHM_NTH_ELEMENT_H
#define _LIBCPP___ALGORITHM_NTH_ELEMENT_H

#include <__algorithm/comp.h>
#include <__algorithm/comp_ref_type.h>
#include <__algorithm/sort.h>
#include <__config>
#include <__iterator/iterator_traits.h>
#include <__utility/swap.h>

#if defined(_LIBCPP_DEBUG_RANDOMIZE_UNSPECIFIED_STABILITY)
#  include <__algorithm/shuffle.h>
#endif

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

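// Helper for the partition step below: scan __j downward (starting one past the end of
// the range still to be tested) toward __i, looking for an element that is less than
// *__m. Returns true with __j left on such an element, which then serves as the guard
// for the unguarded downward scan in the caller; returns false if __j reaches __i
// first, i.e. no element in (__i, __j) is less than *__m.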
template<class _Compare, class _RandomAccessIterator>
_LIBCPP_CONSTEXPR_AFTER_CXX11 bool
__nth_element_find_guard(_RandomAccessIterator& __i, _RandomAccessIterator& __j,
                         _RandomAccessIterator __m, _Compare __comp)
{
    // manually guard downward moving __j against __i
    while (true) {
        if (__i == --__j) {
            return false;
        }
        if (__comp(*__j, *__m)) {
            return true;  // found guard for downward moving __j, now use unguarded partition
        }
    }
}

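// Quickselect over [__first, __last): move the element that a full sort would place at
// __nth into that position. The pivot is chosen by median-of-three (__sort3 on the
// first, middle, and last elements), very short ranges are handled by special cases or
// selection sort, and only the partition containing __nth is processed further; the
// commented-out recursive calls below mark where tail recursion was converted into the
// enclosing loop.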
template <class _Compare, class _RandomAccessIterator>
_LIBCPP_CONSTEXPR_AFTER_CXX11 void
__nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last, _Compare __comp)
{
    // _Compare is known to be a reference type
    typedef typename iterator_traits<_RandomAccessIterator>::difference_type difference_type;
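    // Ranges of at most __limit elements that are not covered by the special cases in
    // the switch below are finished off with selection sort.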
    const difference_type __limit = 7;
    while (true)
    {
        if (__nth == __last)
            return;
        difference_type __len = __last - __first;
        switch (__len)
        {
        case 0:
        case 1:
            return;
        case 2:
            if (__comp(*--__last, *__first))
                swap(*__first, *__last);
            return;
        case 3:
            {
            _RandomAccessIterator __m = __first;
            _VSTD::__sort3<_Compare>(__first, ++__m, --__last, __comp);
            return;
            }
        }
        if (__len <= __limit)
        {
            _VSTD::__selection_sort<_Compare>(__first, __last, __comp);
            return;
        }
        // __len > __limit >= 3
        _RandomAccessIterator __m = __first + __len/2;
        _RandomAccessIterator __lm1 = __last;
        unsigned __n_swaps = _VSTD::__sort3<_Compare>(__first, __m, --__lm1, __comp);
        // *__m is median
        // partition [__first, __m) < *__m and *__m <= [__m, __last)
        // (this inhibits tossing elements equivalent to __m around unnecessarily)
        _RandomAccessIterator __i = __first;
        _RandomAccessIterator __j = __lm1;
        // j points beyond range to be tested, *__lm1 is known to be <= *__m
        // The search going up is known to be guarded but the search coming down isn't.
        // Prime the downward search with a guard.
        if (!__comp(*__i, *__m))  // if *__first == *__m
        {
            // *__first == *__m, *__first doesn't go in first part
            if (_VSTD::__nth_element_find_guard<_Compare>(__i, __j, __m, __comp)) {
                swap(*__i, *__j);
                ++__n_swaps;
            } else {
                // *__first == *__m, *__m <= all other elements
                // Partition instead into [__first, __i) == *__first and *__first < [__i, __last)
                ++__i;  // __first + 1
                __j = __last;
                if (!__comp(*__first, *--__j)) {  // we need a guard if *__first == *(__last-1)
                    while (true) {
                        if (__i == __j) {
                            return;  // [__first, __last) all equivalent elements
                        } else if (__comp(*__first, *__i)) {
                            swap(*__i, *__j);
                            ++__n_swaps;
                            ++__i;
                            break;
                        }
                        ++__i;
                    }
                }
                // [__first, __i) == *__first and *__first < [__j, __last) and __j == __last - 1
                if (__i == __j) {
                    return;
                }
                while (true) {
                    while (!__comp(*__first, *__i))
                        ++__i;
                    while (__comp(*__first, *--__j))
                        ;
                    if (__i >= __j)
                        break;
                    swap(*__i, *__j);
                    ++__n_swaps;
                    ++__i;
                }
                // [__first, __i) == *__first and *__first < [__i, __last)
                // The first part is sorted,
                if (__nth < __i) {
                    return;
                }
                // __nth_element the second part
                // _VSTD::__nth_element<_Compare>(__i, __nth, __last, __comp);
                __first = __i;
                continue;
            }
        }
        ++__i;
        // j points beyond range to be tested, *__lm1 is known to be <= *__m
        // if not yet partitioned...
        if (__i < __j)
        {
            // known that *(__i - 1) < *__m
            while (true)
            {
                // __m still guards upward moving __i
                while (__comp(*__i, *__m))
                    ++__i;
                // It is now known that a guard exists for downward moving __j
                while (!__comp(*--__j, *__m))
                    ;
                if (__i >= __j)
                    break;
                swap(*__i, *__j);
                ++__n_swaps;
                // It is known that __m != __j
                // If __m just moved, follow it
                if (__m == __i)
                    __m = __j;
                ++__i;
            }
        }
        // [__first, __i) < *__m and *__m <= [__i, __last)
        if (__i != __m && __comp(*__m, *__i))
        {
            swap(*__i, *__m);
            ++__n_swaps;
        }
        // [__first, __i) < *__i and *__i <= [__i+1, __last)
        if (__nth == __i)
            return;
        if (__n_swaps == 0)
        {
            // We were given a perfectly partitioned sequence.  Coincidence?
            if (__nth < __i)
            {
                // Check for [__first, __i) already sorted
                __j = __m = __first;
                while (true) {
                    if (++__j == __i) {
                        // [__first, __i) sorted
                        return;
                    }
                    if (__comp(*__j, *__m)) {
                        // not yet sorted, so sort
                        break;
                    }
                    __m = __j;
                }
            }
            else
            {
                // Check for [__i, __last) already sorted
                __j = __m = __i;
                while (true) {
                    if (++__j == __last) {
                        // [__i, __last) sorted
                        return;
                    }
                    if (__comp(*__j, *__m)) {
                        // not yet sorted, so sort
                        break;
                    }
                    __m = __j;
                }
            }
        }
        // __nth_element on range containing __nth
        if (__nth < __i)
        {
            // _VSTD::__nth_element<_Compare>(__first, __nth, __i, __comp);
            __last = __i;
        }
        else
        {
            // _VSTD::__nth_element<_Compare>(__i+1, __nth, __last, __comp);
            __first = ++__i;
        }
    }
}

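// When _LIBCPP_DEBUG_RANDOMIZE_UNSPECIFIED_STABILITY is defined (see the conditional
// <__algorithm/shuffle.h> include above), _LIBCPP_DEBUG_RANDOMIZE_RANGE shuffles the
// given range: the input is shuffled before partitioning, and afterwards the two
// subranges on either side of *__nth are shuffled again, since the standard leaves
// their internal order unspecified and callers must not rely on it. Otherwise the
// macro expands to nothing.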
template <class _RandomAccessIterator, class _Compare>
inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
void
nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last, _Compare __comp)
{
  _LIBCPP_DEBUG_RANDOMIZE_RANGE(__first, __last);
  typedef typename __comp_ref_type<_Compare>::type _Comp_ref;
  _VSTD::__nth_element<_Comp_ref>(__first, __nth, __last, __comp);
  _LIBCPP_DEBUG_RANDOMIZE_RANGE(__first, __nth);
  if (__nth != __last) {
    _LIBCPP_DEBUG_RANDOMIZE_RANGE(++__nth, __last);
  }
}

template <class _RandomAccessIterator>
inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
void
nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last)
{
    _VSTD::nth_element(__first, __nth, __last, __less<typename iterator_traits<_RandomAccessIterator>::value_type>());
}

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ALGORITHM_NTH_ELEMENT_H