Searched refs:__m (Results 1 – 25 of 44) sorted by relevance

/external/libcxx/include/
__threading_support:108 int __libcpp_recursive_mutex_init(__libcpp_recursive_mutex_t *__m);
111 int __libcpp_recursive_mutex_lock(__libcpp_recursive_mutex_t *__m);
114 bool __libcpp_recursive_mutex_trylock(__libcpp_recursive_mutex_t *__m);
117 int __libcpp_recursive_mutex_unlock(__libcpp_recursive_mutex_t *__m);
120 int __libcpp_recursive_mutex_destroy(__libcpp_recursive_mutex_t *__m);
123 int __libcpp_mutex_lock(__libcpp_mutex_t *__m);
126 bool __libcpp_mutex_trylock(__libcpp_mutex_t *__m);
129 int __libcpp_mutex_unlock(__libcpp_mutex_t *__m);
132 int __libcpp_mutex_destroy(__libcpp_mutex_t *__m);
142 int __libcpp_condvar_wait(__libcpp_condvar_t* __cv, __libcpp_mutex_t* __m);
[all …]
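
These look like libc++'s internal threading shims; std::recursive_mutex is the public wrapper layered on top of them. A minimal sketch at the public level (standard C++ only, not the internal API):

#include <mutex>

std::recursive_mutex m;

void locked_twice() {
    std::lock_guard<std::recursive_mutex> outer(m);  // first acquisition
    std::lock_guard<std::recursive_mutex> inner(m);  // re-entrant lock is allowed
    // ... critical section ...
}   // both levels released here, innermost first
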
__bit_reference:72 __bit_reference(__storage_pointer __s, __storage_type __m) _NOEXCEPT
73 : __seg_(__s), __mask_(__m) {}
145 __bit_const_reference(__storage_pointer __s, __storage_type __m) _NOEXCEPT
146 : __seg_(__s), __mask_(__m) {}
165 …__storage_type __m = (~__storage_type(0) << __first.__ctz_) & (~__storage_type(0) >> (__clz_f - __…
166 __storage_type __b = *__first.__seg_ & __m;
181 __storage_type __m = ~__storage_type(0) >> (__bits_per_word - __n);
182 __storage_type __b = *__first.__seg_ & __m;
201 …__storage_type __m = (~__storage_type(0) << __first.__ctz_) & (~__storage_type(0) >> (__clz_f - __…
202 __storage_type __b = ~*__first.__seg_ & __m;
[all …]
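
The masks above select a bit range inside one storage word: shift all-ones left by the offset of the first bit, and right by the number of high bits to drop. A standalone sketch of the same idiom (hypothetical helper, assuming a 64-bit word and 0 <= lo < hi <= 64):

#include <cstdint>

// Mask covering bits [lo, hi) of a 64-bit word.
uint64_t range_mask(unsigned lo, unsigned hi) {
    return (~uint64_t(0) << lo) & (~uint64_t(0) >> (64 - hi));
}
// Example: range_mask(4, 12) == 0x0000000000000ff0
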
atomic:563 #define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
564 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
565 __m == memory_order_acquire || \
566 __m == memory_order_acq_rel, \
569 #define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
570 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
571 __m == memory_order_acq_rel, \
574 #define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
899 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
900 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
[all …]
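
These macros emit a compile-time warning when a store or load is given a memory order the standard forbids for that operation. A short illustration of what they catch (standard std::atomic API):

#include <atomic>

std::atomic<int> flag{0};

void orders() {
    flag.store(1, std::memory_order_release);        // valid order for a store
    int v = flag.load(std::memory_order_acquire);    // valid order for a load
    // flag.store(1, std::memory_order_acquire);     // invalid for a store:
    //                                               // _LIBCPP_CHECK_STORE_MEMORY_ORDER warns here
    (void)v;
}
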
__mutex_base:107 explicit lock_guard(mutex_type& __m) _LIBCPP_THREAD_SAFETY_ANNOTATION(acquire_capability(__m))
108 : __m_(__m) {__m_.lock();}
110 …lock_guard(mutex_type& __m, adopt_lock_t) _LIBCPP_THREAD_SAFETY_ANNOTATION(requires_capability(__m
111 : __m_(__m) {}
134 explicit unique_lock(mutex_type& __m)
135 : __m_(_VSTD::addressof(__m)), __owns_(true) {__m_->lock();}
137 unique_lock(mutex_type& __m, defer_lock_t) _NOEXCEPT
138 : __m_(_VSTD::addressof(__m)), __owns_(false) {}
140 unique_lock(mutex_type& __m, try_to_lock_t)
141 : __m_(_VSTD::addressof(__m)), __owns_(__m.try_lock()) {}
[all …]
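
The constructors above correspond to the usual tag-dispatch idioms. A minimal sketch of each (standard API):

#include <mutex>

std::mutex m;

void idioms() {
    { std::lock_guard<std::mutex> g(m); }                         // locks now, unlocks at scope exit

    {
        std::unique_lock<std::mutex> deferred(m, std::defer_lock);   // associated, not yet locked
        deferred.lock();                                             // locked explicitly later
    }

    {
        std::unique_lock<std::mutex> attempt(m, std::try_to_lock);   // owns_lock() reports try_lock() result
        if (attempt.owns_lock()) { /* guarded work */ }
    }

    m.lock();
    std::unique_lock<std::mutex> adopted(m, std::adopt_lock);        // take over an already-held lock
}   // adopted unlocks m here
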
shared_mutex:323 explicit shared_lock(mutex_type& __m)
324 : __m_(_VSTD::addressof(__m)),
329 shared_lock(mutex_type& __m, defer_lock_t) _NOEXCEPT
330 : __m_(_VSTD::addressof(__m)),
335 shared_lock(mutex_type& __m, try_to_lock_t)
336 : __m_(_VSTD::addressof(__m)),
337 __owns_(__m.try_lock_shared())
341 shared_lock(mutex_type& __m, adopt_lock_t)
342 : __m_(_VSTD::addressof(__m)),
348 shared_lock(mutex_type& __m,
[all …]
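
std::shared_lock plays the reader-side role that unique_lock plays for writers. A small sketch (C++17 std::shared_mutex):

#include <mutex>
#include <shared_mutex>

std::shared_mutex rw;
int value = 0;

int read_value() {
    std::shared_lock<std::shared_mutex> r(rw);   // many readers may hold this at once
    return value;
}

void write_value(int v) {
    std::unique_lock<std::shared_mutex> w(rw);   // exclusive; blocks readers and writers
    value = v;
}
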
map:902 map(const map& __m)
903 : __tree_(__m.__tree_)
905 insert(__m.begin(), __m.end());
909 map& operator=(const map& __m)
912 __tree_ = __m.__tree_;
914 if (this != &__m) {
916 __tree_.value_comp() = __m.__tree_.value_comp();
917 __tree_.__copy_assign_alloc(__m.__tree_);
918 insert(__m.begin(), __m.end());
927 map(map&& __m)
[all …]
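
The copy constructor copies the comparator and allocator machinery and then re-inserts the elements; the move constructor takes over the tree wholesale. Ordinary usage, for contrast:

#include <map>
#include <string>
#include <utility>

std::map<std::string, int> a{{"one", 1}, {"two", 2}};
std::map<std::string, int> b(a);              // deep copy: b owns its own nodes
std::map<std::string, int> c(std::move(a));   // a is left valid but unspecified (typically empty)
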
algorithm:1570 _ForwardIterator __m = __first;
1576 if (++__m == __last) // Otherwise if source exhausted, pattern not found
1578 if (!__pred(*__m, __value_)) // if there is a mismatch, restart with a new __first
1580 __first = __m;
1611 _RandomAccessIterator __m = __first;
1617 … ++__m; // no need to check range on __m because __s guarantees we have enough source
1618 if (!__pred(*__m, __value_)) // if there is a mismatch, restart with a new __first
1620 __first = __m;
2847 static _LIBCPP_CONSTEXPR const size_t __m = __log2<_Working_result_type, _Rp>::value;
2869 __n_ = __w_ / __m + (__w_ % __m != 0);
[all …]
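
The __m iterator above walks the candidate match while __first marks where the attempt began; that is the classic std::search loop. Calling it from user code:

#include <algorithm>
#include <vector>

std::vector<int> haystack{1, 2, 3, 4, 2, 3, 5};
std::vector<int> needle{2, 3, 5};

auto it = std::search(haystack.begin(), haystack.end(),
                      needle.begin(), needle.end());
// it points at the 2 at index 4; haystack.end() would mean "not found"
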
numeric:226 _Tp __gcd(_Tp __m, _Tp __n)
229 return __n == 0 ? __m : __gcd<_Tp>(__n, __m % __n);
236 gcd(_Tp __m, _Up __n)
243 return static_cast<_Rp>(__gcd(static_cast<_Wp>(__abs<_Rp, _Tp>()(__m)),
250 lcm(_Tp __m, _Up __n)
255 if (__m == 0 || __n == 0)
259 _Rp __val1 = __abs<_Rp, _Tp>()(__m) / gcd(__m, __n);
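
__gcd is the recursive Euclid step; the public C++17 std::gcd and std::lcm take absolute values and compute in the common type of the two operands. For example:

#include <numeric>

static_assert(std::gcd(12, 18) == 6);
static_assert(std::lcm(4, 6) == 12);
static_assert(std::gcd(-8, 12) == 4);   // absolute values are used
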
regex:676 regex_constants::match_flag_type __m
1026 bool isctype(char_type __c, char_class_type __m) const;
1259 regex_traits<_CharT>::isctype(char_type __c, char_class_type __m) const
1261 if (__ct_->is(__m, __c))
1263 return (__c == '_' && (__m & __regex_word));
2840 match_results<const _CharT*, _Allocator>& __m,
2846 match_results<const _CharT*, _Allocator>& __m,
2851 match_results<const _CharT*, _Allocator>& __m,
2856 match_results<const _CharT*, _Allocator>& __m,
2861 match_results<const _CharT*, _Allocator>& __m,
[all …]
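
The __m parameter in those regex_search overloads is the match_results object that receives the capture groups. Typical usage:

#include <regex>
#include <string>

std::string s = "item=42";
std::smatch m;                          // match_results for std::string iterators
std::regex re("item=(\\d+)");

if (std::regex_search(s, m, re)) {
    std::string number = m[1].str();    // "42" — capture group 1
}
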
random:1670 unsigned long long __m, unsigned long long _Mp,
1671 bool _MightOverflow = (__a != 0 && __m != 0 && __m-1 > (_Mp-__c)/__a)>
1676 template <unsigned long long __a, unsigned long long __c, unsigned long long __m>
1677 struct __lce_ta<__a, __c, __m, (unsigned long long)(~0), true>
1684 const result_type __q = __m / __a;
1685 const result_type __r = __m % __a;
1688 __x = __t0 + (__t0 < __t1) * __m - __t1;
1689 __x += __c - (__x >= __m - __c) * __m;
1694 template <unsigned long long __a, unsigned long long __m>
1695 struct __lce_ta<__a, 0, __m, (unsigned long long)(~0), true>
[all …]
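
The __lce_ta specialization above is Schrage's decomposition: writing m = a*q + r with q = m/a and r = m%a lets a*x mod m be computed without intermediate overflow whenever r <= q. A standalone sketch of the same step (hypothetical helper name):

#include <cstdint>

// (a * x) % m without overflow, valid when (m % a) <= (m / a), x < m,
// and m leaves headroom below 2^64 (true for LCG moduli such as 2^31 - 1).
uint64_t schrage_mulmod(uint64_t a, uint64_t x, uint64_t m) {
    const uint64_t q  = m / a;
    const uint64_t r  = m % a;
    const uint64_t t0 = a * (x % q);   // < m, so it cannot overflow
    const uint64_t t1 = r * (x / q);   // <= x < m when r <= q
    return t0 + (t0 < t1) * m - t1;    // same branch-free adjustment as the libc++ code
}
// e.g. minstd: schrage_mulmod(16807, x, 2147483647) advances the generator state
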
/external/clang/lib/Headers/
mmintrin.h:79 _mm_cvtsi64_si32(__m64 __m) in _mm_cvtsi64_si32() argument
81 return __builtin_ia32_vec_ext_v2si((__v2si)__m, 0); in _mm_cvtsi64_si32()
111 _mm_cvtm64_si64(__m64 __m) in _mm_cvtm64_si64() argument
113 return (long long)__m; in _mm_cvtm64_si64()
739 _mm_sll_pi16(__m64 __m, __m64 __count) in _mm_sll_pi16() argument
741 return (__m64)__builtin_ia32_psllw((__v4hi)__m, __count); in _mm_sll_pi16()
760 _mm_slli_pi16(__m64 __m, int __count) in _mm_slli_pi16() argument
762 return (__m64)__builtin_ia32_psllwi((__v4hi)__m, __count); in _mm_slli_pi16()
782 _mm_sll_pi32(__m64 __m, __m64 __count) in _mm_sll_pi32() argument
784 return (__m64)__builtin_ia32_pslld((__v2si)__m, __count); in _mm_sll_pi32()
[all …]
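
The MMX wrappers simply forward to the corresponding builtins. A compile-level sketch of the shift intrinsics (x86 only, built with -mmmx; MMX state should be cleared with _mm_empty() before later x87 code):

#include <mmintrin.h>

long long shift_lanes() {
    __m64 v = _mm_set_pi16(4, 3, 2, 1);    // four packed 16-bit lanes
    __m64 r = _mm_slli_pi16(v, 2);         // each lane shifted left by 2 -> 16, 12, 8, 4
    long long bits = _mm_cvtm64_si64(r);   // reinterpret the 64-bit MMX register
    _mm_empty();
    return bits;
}
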
mm3dnow.h:46 _m_pf2id(__m64 __m) { in _m_pf2id() argument
47 return (__m64)__builtin_ia32_pf2id((__v2sf)__m); in _m_pf2id()
91 _m_pfrcp(__m64 __m) { in _m_pfrcp() argument
92 return (__m64)__builtin_ia32_pfrcp((__v2sf)__m); in _m_pfrcp()
106 _m_pfrsqrt(__m64 __m) { in _m_pfrsqrt() argument
107 return (__m64)__builtin_ia32_pfrsqrt((__v2sf)__m); in _m_pfrsqrt()
126 _m_pi2fd(__m64 __m) { in _m_pi2fd() argument
127 return (__m64)__builtin_ia32_pi2fd((__v2si)__m); in _m_pi2fd()
140 _m_pf2iw(__m64 __m) { in _m_pf2iw() argument
141 return (__m64)__builtin_ia32_pf2iw((__v2sf)__m); in _m_pf2iw()
[all …]
xsavesintrin.h:35 _xsaves(void *__p, unsigned long long __m) { in _xsaves() argument
36 __builtin_ia32_xsaves(__p, __m); in _xsaves()
40 _xrstors(void *__p, unsigned long long __m) { in _xrstors() argument
41 __builtin_ia32_xrstors(__p, __m); in _xrstors()
46 _xrstors64(void *__p, unsigned long long __m) { in _xrstors64() argument
47 __builtin_ia32_xrstors64(__p, __m); in _xrstors64()
51 _xsaves64(void *__p, unsigned long long __m) { in _xsaves64() argument
52 __builtin_ia32_xsaves64(__p, __m); in _xsaves64()
xsaveintrin.h:35 _xsave(void *__p, unsigned long long __m) { in _xsave() argument
36 return __builtin_ia32_xsave(__p, __m); in _xsave()
40 _xrstor(void *__p, unsigned long long __m) { in _xrstor() argument
41 return __builtin_ia32_xrstor(__p, __m); in _xrstor()
46 _xsave64(void *__p, unsigned long long __m) { in _xsave64() argument
47 return __builtin_ia32_xsave64(__p, __m); in _xsave64()
51 _xrstor64(void *__p, unsigned long long __m) { in _xrstor64() argument
52 return __builtin_ia32_xrstor64(__p, __m); in _xrstor64()
xsavecintrin.h:35 _xsavec(void *__p, unsigned long long __m) { in _xsavec() argument
36 __builtin_ia32_xsavec(__p, __m); in _xsavec()
41 _xsavec64(void *__p, unsigned long long __m) { in _xsavec64() argument
42 __builtin_ia32_xsavec64(__p, __m); in _xsavec64()
xsaveoptintrin.h:35 _xsaveopt(void *__p, unsigned long long __m) { in _xsaveopt() argument
36 return __builtin_ia32_xsaveopt(__p, __m); in _xsaveopt()
41 _xsaveopt64(void *__p, unsigned long long __m) { in _xsaveopt64() argument
42 return __builtin_ia32_xsaveopt64(__p, __m); in _xsaveopt64()
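
All four xsave headers above follow the same shape: a save-area pointer plus a feature-component mask. A heavily hedged sketch of the plain _xsave/_xrstor pair (assumptions: -mxsave, CPU and OS support, a 64-byte-aligned and zero-initialised area, and a placeholder 4096-byte size — the real size comes from CPUID leaf 0xD):

#include <immintrin.h>

alignas(64) static unsigned char xsave_area[4096] = {};   // zeroed XSAVE header on first use

void checkpoint_fpu_sse() {
    const unsigned long long mask = 0x3;   // bit 0 = x87, bit 1 = SSE state
    _xsave(xsave_area, mask);
    // ... code that clobbers FP/SSE state ...
    _xrstor(xsave_area, mask);
}
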
clflushoptintrin.h:35 _mm_clflushopt(char * __m) { in _mm_clflushopt() argument
36 __builtin_ia32_clflushopt(__m); in _mm_clflushopt()
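
_mm_clflushopt takes the address of the line to flush. A small sketch (needs -mclflushopt, plus a fence when ordering against later stores matters):

#include <immintrin.h>

alignas(64) char line[64];

void flush_line() {
    line[0] = 1;
    _mm_clflushopt(line);   // evict the cache line holding `line`
    _mm_sfence();           // order the flush before subsequent stores
}
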
avxintrin.h:2449 _mm_maskload_pd(double const *__p, __m128i __m) in _mm_maskload_pd() argument
2451 return (__m128d)__builtin_ia32_maskloadpd((const __v2df *)__p, (__v2di)__m); in _mm_maskload_pd()
2455 _mm256_maskload_pd(double const *__p, __m256i __m) in _mm256_maskload_pd() argument
2458 (__v4di)__m); in _mm256_maskload_pd()
2462 _mm_maskload_ps(float const *__p, __m128i __m) in _mm_maskload_ps() argument
2464 return (__m128)__builtin_ia32_maskloadps((const __v4sf *)__p, (__v4si)__m); in _mm_maskload_ps()
2468 _mm256_maskload_ps(float const *__p, __m256i __m) in _mm256_maskload_ps() argument
2470 return (__m256)__builtin_ia32_maskloadps256((const __v8sf *)__p, (__v8si)__m); in _mm256_maskload_ps()
2475 _mm256_maskstore_ps(float *__p, __m256i __m, __m256 __a) in _mm256_maskstore_ps() argument
2477 __builtin_ia32_maskstoreps256((__v8sf *)__p, (__v8si)__m, (__v8sf)__a); in _mm256_maskstore_ps()
[all …]
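
For the masked loads and stores, the sign bit of each 32-bit (or 64-bit) mask lane decides whether that element is touched; unselected load lanes come back as zero. A sketch with the ps variants (needs -mavx):

#include <immintrin.h>

void masked_copy(const float *src, float *dst) {
    // select lanes 0, 2, 4, 6 (set_epi32 lists lane 7 first)
    __m256i mask = _mm256_set_epi32(0, -1, 0, -1, 0, -1, 0, -1);
    __m256  v    = _mm256_maskload_ps(src, mask);    // unselected lanes read as 0.0f
    _mm256_maskstore_ps(dst, mask, v);               // only the selected lanes are written
}
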
/external/libcxx/include/experimental/
numeric:70 _Tp __gcd(_Tp __m, _Tp __n)
73 return __n == 0 ? __m : __gcd<_Tp>(__n, __m % __n);
80 gcd(_Tp __m, _Up __n)
87 return static_cast<_Rp>(__gcd(static_cast<_Wp>(__abs<_Rp, _Tp>()(__m)),
94 lcm(_Tp __m, _Up __n)
99 if (__m == 0 || __n == 0)
103 _Rp __val1 = __abs<_Rp, _Tp>()(__m) / gcd(__m, __n);
/external/kernel-headers/original/uapi/linux/netfilter/
x_tables.h:132 struct xt_entry_match *__m; \
136 __i += __m->u.match_size) { \
137 __m = (void *)e + __i; \
139 __ret = fn(__m , ## args); \
/external/iproute2/include/linux/netfilter/
x_tables.h:131 struct xt_entry_match *__m; \
135 __i += __m->u.match_size) { \
136 __m = (void *)e + __i; \
138 __ret = fn(__m , ## args); \
/external/iptables/include/linux/netfilter/
x_tables.h:131 struct xt_entry_match *__m; \
135 __i += __m->u.match_size) { \
136 __m = (void *)e + __i; \
138 __ret = fn(__m , ## args); \
/external/clang/test/CodeGen/
builtin-clflushopt.c:5 void test_mm_clflushopt(char * __m) { in test_mm_clflushopt() argument
8 _mm_clflushopt(__m); in test_mm_clflushopt()
/external/libcxx/benchmarks/
unordered_set_operations.bench.cpp:82 const uint32_t __m = 0x5bd1e995; in operator ()() local
86 __k *= __m; in operator ()()
88 __k *= __m; in operator ()()
89 __h *= __m; in operator ()()
92 __h *= __m; in operator ()()
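
The constant 0x5bd1e995 and the >> 24 shift in that benchmark appear to be the MurmurHash2 per-block mixing step. Pulled out as a standalone helper (hypothetical name):

#include <cstdint>

// One MurmurHash2 round: mix a 4-byte block k into the running hash h.
uint32_t murmur2_mix(uint32_t h, uint32_t k) {
    const uint32_t m = 0x5bd1e995;
    k *= m;
    k ^= k >> 24;
    k *= m;
    h *= m;     // scramble the accumulator
    h ^= k;     // fold the block in
    return h;
}
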
/external/syslinux/com32/include/
stdio.h:53 static __inline__ FILE *fdopen(int __fd, const char *__m) in fdopen() argument
55 (void)__m; in fdopen()
