Lines Matching refs:__m
1570 _ForwardIterator __m = __first;
1576 if (++__m == __last) // Otherwise if source exhausted, pattern not found
1578 if (!__pred(*__m, __value_)) // if there is a mismatch, restart with a new __first
1580 __first = __m;
1611 _RandomAccessIterator __m = __first;
1617 … ++__m; // no need to check range on __m because __s guarantees we have enough source
1618 if (!__pred(*__m, __value_)) // if there is a mismatch, restart with a new __first
1620 __first = __m;
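
The two groups above appear to be the forward-iterator and random-access overloads of __search_n: scan for __count consecutive elements matching __value_, and on a mismatch at __m restart the outer scan just past __m (the random-access version precomputes a bound, __s, so the inner loop can skip the end-of-range check). A minimal sketch of the forward version, with illustrative names and operator== standing in for __pred; this is not libc++'s exact code:

#include <cassert>
#include <vector>

template <class ForwardIt, class Size, class T>
ForwardIt search_n_sketch(ForwardIt first, ForwardIt last, Size count, const T& value)
{
    if (count <= 0)
        return first;
    while (true)
    {
        while (first != last && !(*first == value)) // find a candidate start
            ++first;
        if (first == last)                          // no candidate left
            return last;
        ForwardIt m = first;
        Size c(0);
        while (true)
        {
            if (++c == count)      // pattern exhausted: match begins at first
                return first;
            if (++m == last)       // source exhausted: pattern not found
                return last;
            if (!(*m == value))    // mismatch: restart just past m
            {
                first = m;
                ++first;
                break;
            }
        }
    }
}

int main()
{
    std::vector<int> v{1, 2, 2, 3, 2, 2, 2, 4};
    assert(search_n_sketch(v.begin(), v.end(), 3, 2) == v.begin() + 4);
}
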
2847 static _LIBCPP_CONSTEXPR const size_t __m = __log2<_Working_result_type, _Rp>::value;
2869 __n_ = __w_ / __m + (__w_ % __m != 0);
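
These two matches appear to come from __independent_bits_engine's setup (which old <algorithm> carries for shuffle support): __m is the number of bits one engine call can deliver, and __n_ is how many calls are needed to assemble __w_ bits. The second line is the standard integer ceiling-division idiom, worth calling out on its own; ceil_div below is an illustrative stand-alone version, not a libc++ name:

#include <cassert>
#include <cstddef>

// For nonnegative w and positive m:  w / m + (w % m != 0)  ==  ceil(w / m).
constexpr std::size_t ceil_div(std::size_t w, std::size_t m)
{
    return w / m + (w % m != 0); // round up instead of truncating
}

static_assert(ceil_div(64, 31) == 3, "64 bits at 31 bits per call takes 3 calls");
static_assert(ceil_div(62, 31) == 2, "exact multiples do not round up");

int main() { assert(ceil_div(1, 31) == 1); }
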
3306 _ForwardIterator __m = __first;
3307 _VSTD::advance(__m, __l2);
3308 if (__pred(*__m))
3310 __first = ++__m;
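
This group is the halving loop of partition_point: probe the midpoint __m, and if the predicate still holds there the answer lies to its right, so resume at ++__m; otherwise keep the left half. The same loop, sketched with illustrative names:

#include <cassert>
#include <iterator>
#include <vector>

template <class ForwardIt, class Pred>
ForwardIt partition_point_sketch(ForwardIt first, ForwardIt last, Pred pred)
{
    auto len = std::distance(first, last);
    while (len != 0)
    {
        auto l2 = len / 2;
        ForwardIt m = first;
        std::advance(m, l2);
        if (pred(*m))
        {
            first = ++m;      // point is right of m
            len -= l2 + 1;
        }
        else
            len = l2;         // point is at or left of m
    }
    return first;
}

int main()
{
    std::vector<int> v{2, 4, 6, 1, 3};
    auto is_even = [](int x) { return x % 2 == 0; };
    assert(partition_point_sketch(v.begin(), v.end(), is_even) == v.begin() + 3);
}
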
3332 _ForwardIterator __m = __first;
3333 if (__pred(*++__m))
3335 swap(*__first, *__m);
3336 return __m;
3376 _ForwardIterator __m = __first;
3378 _VSTD::advance(__m, __len2);
3379 // recurse on [__first, __m), *__first known to be false
3383 …_ForwardIterator __first_false = __stable_partition<_PredRef>(__first, __m, __pred, __len2, __p, _…
3386 // recurse on [__m, __last], except increase __m until *(__m) is false, *__last known to be true
3387 _ForwardIterator __m1 = __m;
3402 return _VSTD::rotate(__first_false, __m, __second_false);
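
The __stable_partition matches above (forward-iterator version) outline a divide-and-conquer: handle tiny ranges by swapping (lines 3332-3336), otherwise split at the midpoint __m, partition each half recursively, and rotate the left half's false block past the right half's true block. A simplified sketch of that recursion; the real code also skips a leading true run, falls back to a scratch buffer when available, and handles more base cases:

#include <algorithm>
#include <cassert>
#include <iterator>
#include <vector>

template <class ForwardIt, class Pred>
ForwardIt stable_partition_sketch(ForwardIt first, ForwardIt last, Pred pred)
{
    auto len = std::distance(first, last);
    if (len == 0)
        return first;
    if (len == 1)
        return pred(*first) ? last : first;
    ForwardIt m = first;
    std::advance(m, len / 2);
    // first_false: start of the false block in the left half
    ForwardIt first_false = stable_partition_sketch(first, m, pred);
    // second_false: start of the false block in the right half
    ForwardIt second_false = stable_partition_sketch(m, last, pred);
    // rotate [first_false, m) past [m, second_false), keeping relative order
    return std::rotate(first_false, m, second_false);
}

int main()
{
    std::vector<int> v{1, 2, 3, 4, 5, 6};
    auto odd = [](int x) { return x % 2 != 0; };
    auto p = stable_partition_sketch(v.begin(), v.end(), odd);
    assert((v == std::vector<int>{1, 3, 5, 2, 4, 6}) && p == v.begin() + 3);
}
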
3459 _BidirectionalIterator __m = __first;
3460 if (__pred(*++__m))
3462 swap(*__first, *__m);
3463 swap(*__m, *__last);
3466 swap(*__m, *__last);
3467 swap(*__first, *__m);
3468 return __m;
3508 _BidirectionalIterator __m = __first;
3510 _VSTD::advance(__m, __len2);
3511 // recurse on [__first, __m-1], except reduce __m-1 until *(__m-1) is true, *__first known to be false
3514 _BidirectionalIterator __m1 = __m;
3530 // recurse on [__m, __last], except increase __m until *(__m) is false, *__last known to be true
3531 __m1 = __m;
3547 return _VSTD::rotate(__first_false, __m, __second_false);
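
The bidirectional variant above follows the same plan, trimming known-false and known-true elements at the edges before recursing. Both variants finish with the same rotate (lines 3402 and 3547); for illustration, here is what that final rotate does at the join, on a hand-built example:

#include <algorithm>
#include <cassert>
#include <vector>

int main()
{
    //                 F   F   T  T   (left half's falses, right half's trues)
    std::vector<int> v{10, 20, 1, 2};
    // rotate(first_false = begin, m = begin + 2, second_false = end)
    auto p = std::rotate(v.begin(), v.begin() + 2, v.end());
    assert((v == std::vector<int>{1, 2, 10, 20})); // trues now precede falses
    assert(p == v.begin() + 2);                    // p is the new partition point
}
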
3924 _RandomAccessIterator __m = __first;
3933 __m += __delta;
3935 … __n_swaps = _VSTD::__sort5<_Compare>(__first, __first + __delta, __m, __m+__delta, __lm1, __comp);
3940 __m += __delta;
3941 __n_swaps = _VSTD::__sort3<_Compare>(__first, __m, __lm1, __comp);
3944 // *__m is median
3945 // partition [__first, __m) < *__m and *__m <= [__m, __last)
3946 // (this inhibits tossing elements equivalent to __m around unnecessarily)
3949 // j points beyond range to be tested, *__m is known to be <= *__lm1
3952 if (!__comp(*__i, *__m)) // if *__first == *__m
3954 // *__first == *__m, *__first doesn't go in first part
3960 // *__first == *__m, *__m <= all other elements
4001 if (__comp(*__j, *__m))
4009 // It is known that *__i < *__m
4011 // j points beyond range to be tested, *__m is known to be <= *__lm1
4015 // known that *(__i - 1) < *__m
4016 // known that __i <= __m
4019 // __m still guards upward moving __i
4020 while (__comp(*__i, *__m))
4023 while (!__comp(*--__j, *__m))
4029 // It is known that __m != __j
4030 // If __m just moved, follow it
4031 if (__m == __i)
4032 __m = __j;
4036 // [__first, __i) < *__m and *__m <= [__i, __last)
4037 if (__i != __m && __comp(*__m, *__i))
4039 swap(*__i, *__m);
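
This group (file lines 3924-4039) appears to be the pivot selection and partition inside an introsort-style __sort: the median of five equally spaced samples via __sort5 for long ranges, median-of-3 via __sort3 otherwise, so the pivot lands at __m; the two scans then use *__m itself as a guard, the code follows the pivot when a swap moves it (__m = __j), and a final fix-up swap establishes [__first, __i) < *__m and *__m <= [__i, __last). The sketch below is not that in-place scheme: it keeps only the median-of-3 step and the resulting invariant, copying the pivot value and delegating the scan to std::partition for clarity. All names are illustrative.

#include <algorithm>
#include <cassert>
#include <vector>

template <class It>
It partition_sketch(It first, It last) // returns the partition boundary
{
    It m = first + (last - first) / 2;
    It lm1 = last - 1;
    // median-of-3 via a tiny 3-sort, so *first <= *m <= *lm1
    if (*m < *first) std::iter_swap(first, m);
    if (*lm1 < *m)
    {
        std::iter_swap(m, lm1);
        if (*m < *first) std::iter_swap(first, m);
    }
    auto pivot = *m; // copy; libc++ instead tracks the pivot's position
    return std::partition(first, last,
                          [&](const decltype(pivot)& x) { return x < pivot; });
}

int main()
{
    std::vector<int> v{5, 1, 4, 2, 8, 3, 7};  // median of 5, 2, 7 is 5
    auto b = partition_sketch(v.begin(), v.end());
    assert(b - v.begin() == 4);               // the four elements < 5 come first
    assert(std::is_partitioned(v.begin(), v.end(),
                               [](int x) { return x < 5; }));
}
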
4180 _ForwardIterator __m = __first;
4181 _VSTD::advance(__m, __l2);
4182 if (__comp(*__m, __value_))
4184 __first = ++__m;
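
The group above is __lower_bound's halving step: when *__m < __value_ the answer lies right of __m, so resume at ++__m; otherwise shrink to the left half. The same loop, sketched with illustrative names and operator< standing in for __comp:

#include <cassert>
#include <iterator>
#include <vector>

template <class ForwardIt, class T>
ForwardIt lower_bound_sketch(ForwardIt first, ForwardIt last, const T& value)
{
    auto len = std::distance(first, last);
    while (len != 0)
    {
        auto l2 = len / 2;
        ForwardIt m = first;
        std::advance(m, l2);
        if (*m < value)           // answer is right of m
        {
            first = ++m;
            len -= l2 + 1;
        }
        else                      // answer is at or left of m
            len = l2;
    }
    return first;
}

int main()
{
    std::vector<int> v{1, 3, 3, 5};
    assert(lower_bound_sketch(v.begin(), v.end(), 3) == v.begin() + 1);
}
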
4228 _ForwardIterator __m = __first;
4229 _VSTD::advance(__m, __l2);
4230 if (__comp(__value_, *__m))
4234 __first = ++__m;
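
__upper_bound is the mirror image: the test is __comp(__value_, *__m), i.e. value < *m, and the halves kept are swapped accordingly. The same sketch with only that decision flipped:

#include <cassert>
#include <iterator>
#include <vector>

template <class ForwardIt, class T>
ForwardIt upper_bound_sketch(ForwardIt first, ForwardIt last, const T& value)
{
    auto len = std::distance(first, last);
    while (len != 0)
    {
        auto l2 = len / 2;
        ForwardIt m = first;
        std::advance(m, l2);
        if (value < *m)           // answer is at or left of m
            len = l2;
        else                      // answer is right of m
        {
            first = ++m;
            len -= l2 + 1;
        }
    }
    return first;
}

int main()
{
    std::vector<int> v{1, 3, 3, 5};
    assert(upper_bound_sketch(v.begin(), v.end(), 3) == v.begin() + 3);
}
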
4276 _ForwardIterator __m = __first;
4277 _VSTD::advance(__m, __l2);
4278 if (__comp(*__m, __value_))
4280 __first = ++__m;
4283 else if (__comp(__value_, *__m))
4285 __last = __m;
4290 _ForwardIterator __mp1 = __m;
4293 __lower_bound<_Compare>(__first, __m, __value_, __comp),
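
__equal_range narrows with both tests until *__m is equivalent to the value, then finishes with __lower_bound on [__first, __m) and, via __mp1, an upper-bound search to the right (that second call is elided from the matches above). The net result is the standard identity, sketched here with the public building blocks:

#include <algorithm>
#include <cassert>
#include <utility>
#include <vector>

template <class ForwardIt, class T>
std::pair<ForwardIt, ForwardIt>
equal_range_sketch(ForwardIt first, ForwardIt last, const T& value)
{
    return {std::lower_bound(first, last, value),
            std::upper_bound(first, last, value)};
}

int main()
{
    std::vector<int> v{1, 3, 3, 5};
    auto r = equal_range_sketch(v.begin(), v.end(), 3);
    assert(r.first == v.begin() + 1 && r.second == v.begin() + 3);
}
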
4712 _RandomAccessIterator __m = __first1 + __l2;
4713 __stable_sort<_Compare>(__first1, __m, __comp, __l2, __first2, __l2);
4714 __stable_sort<_Compare>(__m, __last1, __comp, __len - __l2, __first2 + __l2, __len - __l2);
4715 __merge_move_construct<_Compare>(__first1, __m, __m, __last1, __first2, __comp);
4748 _RandomAccessIterator __m = __first + __l2;
4753 __stable_sort_move<_Compare>(__first, __m, __comp, __l2, __buff);
4755 __stable_sort_move<_Compare>(__m, __last, __comp, __len - __l2, __buff + __l2);
4765 __stable_sort<_Compare>(__first, __m, __comp, __l2, __buff, __buff_size);
4766 __stable_sort<_Compare>(__m, __last, __comp, __len - __l2, __buff, __buff_size);
4767 … __inplace_merge<_Compare>(__first, __m, __last, __comp, __l2, __len - __l2, __buff, __buff_size);
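
These __stable_sort matches show the merge-sort skeleton: split at the midpoint __m, sort each half, then merge, either by move-constructing through the scratch buffer (__merge_move_construct, when the buffer is large enough) or in place (__inplace_merge). A compact sketch via std::inplace_merge, which makes the same buffered-versus-in-place decision internally:

#include <algorithm>
#include <cassert>
#include <vector>

template <class RandomIt>
void stable_sort_sketch(RandomIt first, RandomIt last)
{
    auto len = last - first;
    if (len <= 1)
        return;
    RandomIt m = first + len / 2;
    stable_sort_sketch(first, m);       // sort left half
    stable_sort_sketch(m, last);        // sort right half
    std::inplace_merge(first, m, last); // stable merge of the two halves
}

int main()
{
    std::vector<int> v{3, 1, 4, 1, 5, 9, 2, 6};
    stable_sort_sketch(v.begin(), v.end());
    assert(std::is_sorted(v.begin(), v.end()));
}
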
5200 _RandomAccessIterator __m = __first;
5201 _VSTD::__sort3<_Compare>(__first, ++__m, --__last, __comp);
5211 _RandomAccessIterator __m = __first + __len/2;
5213 unsigned __n_swaps = _VSTD::__sort3<_Compare>(__first, __m, --__lm1, __comp);
5214 // *__m is median
5215 // partition [__first, __m) < *__m and *__m <= [__m, __last)
5216 // (this inhibits tossing elements equivalent to __m around unnecessarily)
5219 // j points beyond range to be tested, *__m is known to be <= *__lm1
5222 if (!__comp(*__i, *__m)) // if *__first == *__m
5224 // *__first == *__m, *__first doesn't go in first part
5230 // *__first == *__m, *__m <= all other elements
5274 if (__comp(*__j, *__m))
5283 // j points beyond range to be tested, *__m is known to be <= *__lm1
5287 // known that *(__i - 1) < *__m
5290 // __m still guards upward moving __i
5291 while (__comp(*__i, *__m))
5294 while (!__comp(*--__j, *__m))
5300 // It is known that __m != __j
5301 // If __m just moved, follow it
5302 if (__m == __i)
5303 __m = __j;
5307 // [__first, __i) < *__m and *__m <= [__i, __last)
5308 if (__i != __m && __comp(*__m, *__i))
5310 swap(*__i, *__m);
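
This group (file lines 5200-5310) appears to be __nth_element repeating the same pivot-and-partition scheme as __sort above, but with a plain median-of-3. Note __sort3 doing double duty: at 5201 it simply sorts a length-3 range outright, while at 5213 sorting first/middle/last also parks the median at __m, which is exactly the pivot the partition needs, and its swap count (__n_swaps) appears to feed an already-partitioned fast path. A sketch of such a 3-sort; illustrative, not libc++'s exact code:

#include <algorithm>
#include <cassert>

template <class It>
unsigned sort3_sketch(It x, It y, It z) // post: *x <= *y <= *z
{
    unsigned swaps = 0;
    if (*y < *x) { std::iter_swap(x, y); ++swaps; }
    if (*z < *y)
    {
        std::iter_swap(y, z); ++swaps;
        if (*y < *x) { std::iter_swap(x, y); ++swaps; }
    }
    return swaps;
}

int main()
{
    int a[] = {3, 1, 2};
    unsigned n = sort3_sketch(a, a + 1, a + 2);
    assert(a[0] == 1 && a[1] == 2 && a[2] == 3 && n == 2);
}
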
5322 __j = __m = __first;
5325 if (__comp(*__j, *__m))
5328 __m = __j;
5336 __j = __m = __i;
5339 if (__comp(*__j, *__m))
5342 __m = __j;
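
In this last group __m trails __j by one position: if any *__j compares less than its predecessor *__m, the range is out of order. In context this appears to be the "already sorted?" fast path taken when the preceding partition performed no swaps (__n_swaps == 0). The idiom as a stand-alone sketch, with illustrative names:

#include <cassert>
#include <vector>

template <class ForwardIt>
bool is_sorted_sketch(ForwardIt first, ForwardIt last)
{
    if (first == last)
        return true;
    ForwardIt m = first;                // m trails j by one position
    for (ForwardIt j = first; ++j != last; m = j)
        if (*j < *m)                    // out of order: not sorted
            return false;
    return true;
}

int main()
{
    std::vector<int> a{1, 2, 2, 3}, b{1, 3, 2};
    assert(is_sorted_sketch(a.begin(), a.end()));
    assert(!is_sorted_sketch(b.begin(), b.end()));
}
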