/bionic/tests/
stdatomic_test.cpp
    49  atomic_thread_fence(memory_order_relaxed);  in TEST()
    58  atomic_signal_fence(memory_order_relaxed);  in TEST()
    85  ASSERT_FALSE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));  in TEST()
    86  ASSERT_TRUE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));  in TEST()
    88  atomic_flag_clear_explicit(&f, memory_order_relaxed);  in TEST()
    89  ASSERT_FALSE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));  in TEST()
    96  atomic_store_explicit(&i, 123, memory_order_relaxed);  in TEST()
    97  ASSERT_EQ(123, atomic_load_explicit(&i, memory_order_relaxed));  in TEST()
   104  ASSERT_EQ(456, atomic_exchange_explicit(&i, 123, memory_order_relaxed));  in TEST()
   119  ASSERT_TRUE(atomic_compare_exchange_strong_explicit(&i, &expected, 456, memory_order_relaxed,  in TEST()
  [all …]
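As a standalone illustration of what these tests exercise (relaxed fences, atomic_flag round-trips, and plain load/store/exchange), here is a minimal C11 sketch using assert() in place of gtest's ASSERT_* macros. The value sequence is made up for the sketch, not the test's exact sequence:

    #include <assert.h>
    #include <stdatomic.h>

    int main(void) {
      /* Fences accept memory_order_relaxed; it just makes them no-ops. */
      atomic_thread_fence(memory_order_relaxed);
      atomic_signal_fence(memory_order_relaxed);

      /* atomic_flag starts clear: test-and-set returns the OLD value. */
      atomic_flag f = ATOMIC_FLAG_INIT;
      assert(!atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));
      assert(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));
      atomic_flag_clear_explicit(&f, memory_order_relaxed);
      assert(!atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));

      /* Store/load, exchange, and compare-exchange round-trips. */
      atomic_int i;
      atomic_store_explicit(&i, 123, memory_order_relaxed);
      assert(atomic_load_explicit(&i, memory_order_relaxed) == 123);
      assert(atomic_exchange_explicit(&i, 456, memory_order_relaxed) == 123);

      int expected = 456;
      assert(atomic_compare_exchange_strong_explicit(
          &i, &expected, 123, memory_order_relaxed, memory_order_relaxed));
      return 0;
    }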
/bionic/libc/bionic/
pthread_mutex.cpp
   282  locked_uncontended, memory_order_acquire, memory_order_relaxed))) {  in __pthread_normal_mutex_trylock()
   391  atomic_fetch_add_explicit(&mutex->state, MUTEX_COUNTER_BITS_ONE, memory_order_relaxed);  in __recursive_increment()
   415  uint32_t owner_tid = atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed);  in __recursive_or_errorcheck_mutex_wait()
   424  uint16_t old_state = atomic_load_explicit(&mutex->state, memory_order_relaxed);  in __pthread_mutex_lock_with_timeout()
   435  if (tid == atomic_load_explicit(&mutex->owner_tid, memory_order_relaxed)) {  in __pthread_mutex_lock_with_timeout()
   452  locked_uncontended, memory_order_acquire, memory_order_relaxed))) {  in __pthread_mutex_lock_with_timeout()
   453  atomic_store_explicit(&mutex->owner_tid, tid, memory_order_relaxed);  in __pthread_mutex_lock_with_timeout()
   471  memory_order_relaxed))) {  in __pthread_mutex_lock_with_timeout()
   472  atomic_store_explicit(&mutex->owner_tid, tid, memory_order_relaxed);  in __pthread_mutex_lock_with_timeout()
   483  memory_order_relaxed,  in __pthread_mutex_lock_with_timeout()
  [all …]
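The recurring pattern in these matches: the mutex state word is the only synchronization point (CAS with acquire on success, relaxed on failure), while owner_tid and the recursion counter can be fully relaxed because they are only meaningfully read or written by the thread that already owns the mutex. A simplified sketch with a made-up layout (the real code packs all of this into a 16-bit state word plus an owner_tid field):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Toy layout, not bionic's: 'state' is 0 (unlocked) or 1 (locked) plus
     * a recursion counter in higher bits; 'owner_tid' names the owner. */
    typedef struct {
      _Atomic(uint16_t) state;
      _Atomic(int32_t) owner_tid;
    } toy_recursive_mutex;

    #define TOY_LOCKED 1u
    #define TOY_COUNTER_ONE (1u << 2) /* hypothetical counter placement */

    bool toy_recursive_trylock(toy_recursive_mutex* m, int32_t my_tid) {
      /* Relaxed load: owner_tid is compared against OUR tid, and only this
       * thread ever stores our tid, so there is nothing to synchronize. */
      if (atomic_load_explicit(&m->owner_tid, memory_order_relaxed) == my_tid) {
        /* Re-entry: only the owner touches the counter, so atomicity alone
         * suffices, as in __recursive_increment() above. */
        atomic_fetch_add_explicit(&m->state, TOY_COUNTER_ONE,
                                  memory_order_relaxed);
        return true;
      }
      uint16_t expected = 0;
      /* acquire on success pairs with the release store in unlock and
       * publishes the previous critical section; failure stays relaxed. */
      if (atomic_compare_exchange_strong_explicit(&m->state, &expected,
                                                  TOY_LOCKED,
                                                  memory_order_acquire,
                                                  memory_order_relaxed)) {
        /* Safe to be relaxed: we now own the mutex, and other threads only
         * read owner_tid to compare it against their own tid. */
        atomic_store_explicit(&m->owner_tid, my_tid, memory_order_relaxed);
        return true;
      }
      return false;
    }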
pthread_rwlock.cpp
   257  if (atomic_load_explicit(&rwlock->state, memory_order_relaxed) != 0) {  in pthread_rwlock_destroy()
   273  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_tryrdlock()
   282  memory_order_acquire, memory_order_relaxed))) {  in __pthread_rwlock_tryrdlock()
   292  if (atomic_load_explicit(&rwlock->writer_tid, memory_order_relaxed) == __get_thread()->tid) {  in __pthread_rwlock_timedrdlock()
   306  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_timedrdlock()
   319  memory_order_relaxed);  in __pthread_rwlock_timedrdlock()
   334  memory_order_relaxed);  in __pthread_rwlock_timedrdlock()
   349  int old_state = atomic_load_explicit(&rwlock->state, memory_order_relaxed);  in __pthread_rwlock_trywrlock()
   353  __state_add_writer_flag(old_state), memory_order_acquire, memory_order_relaxed))) {  in __pthread_rwlock_trywrlock()
   355  atomic_store_explicit(&rwlock->writer_tid, __get_thread()->tid, memory_order_relaxed);  in __pthread_rwlock_trywrlock()
  [all …]
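These matches show the other standard shape: a retry loop around a weak compare-exchange, acquire on success and relaxed on failure (a failed CAS reloads old_state itself, so the loop simply tries again). A sketch under an assumed state encoding, with the reader count in the low bits and one writer flag bit:

    #include <errno.h>
    #include <stdatomic.h>

    #define TOY_WRITER_FLAG (1 << 30) /* assumed bit assignment */

    static _Atomic(int) rw_state;     /* low bits: reader count */

    int toy_rwlock_tryrdlock(void) {
      int old_state = atomic_load_explicit(&rw_state, memory_order_relaxed);
      while (!(old_state & TOY_WRITER_FLAG)) {
        /* Weak CAS: may fail spuriously, but we loop anyway. On failure
         * old_state is refreshed and relaxed ordering is fine since we
         * retry; on success, acquire pairs with the writer's release
         * store in unlock. */
        if (atomic_compare_exchange_weak_explicit(&rw_state, &old_state,
                                                  old_state + 1,
                                                  memory_order_acquire,
                                                  memory_order_relaxed)) {
          return 0;
        }
      }
      return EBUSY; /* a writer holds or is acquiring the lock */
    }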
pthread_key.cpp
    80  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_clean_all()
    90  atomic_load_explicit(&key_map[i].key_destructor, memory_order_relaxed));  in pthread_key_clean_all()
    95  if (atomic_load_explicit(&key_map[i].seq, memory_order_relaxed) != seq) {  in pthread_key_clean_all()
   120  uintptr_t seq = atomic_load_explicit(&key_map[i].seq, memory_order_relaxed);  in pthread_key_create()
   142  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_key_delete()
   156  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_getspecific()
   174  uintptr_t seq = atomic_load_explicit(&key_map[key].seq, memory_order_relaxed);  in pthread_setspecific()
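pthread_key.cpp uses relaxed atomics differently: each key slot carries a seq counter that is bumped by pthread_key_create()/pthread_key_delete(), and readers re-check it to detect that a slot changed underneath them (visible at lines 80 and 95 above). A sketch of that idiom; the odd-means-in-use convention and the field names here are assumptions, not the real encoding:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef void (*toy_destructor_fn)(void*);

    /* One slot of an assumed key map: 'seq' is bumped on every create and
     * delete, so an odd value (assumed convention) means "in use". */
    typedef struct {
      _Atomic(uintptr_t) seq;
      _Atomic(toy_destructor_fn) key_destructor;
    } toy_key_slot;

    static bool toy_seq_in_use(uintptr_t seq) { return (seq & 1) != 0; }

    void toy_run_destructor(toy_key_slot* slot, void* value) {
      /* Relaxed is enough: we only need a snapshot of seq to compare
       * against later, not ordering with other slot fields. */
      uintptr_t seq = atomic_load_explicit(&slot->seq, memory_order_relaxed);
      if (!toy_seq_in_use(seq) || value == NULL) return;

      toy_destructor_fn dtor =
          atomic_load_explicit(&slot->key_destructor, memory_order_relaxed);
      if (dtor == NULL) return;
      dtor(value);

      /* If seq moved while the destructor ran, the key was deleted (and
       * possibly recycled) concurrently; stop trusting this slot. */
      if (atomic_load_explicit(&slot->seq, memory_order_relaxed) != seq) {
        return; /* caller should treat the slot as stale */
      }
    }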
pthread_cond.cpp
   111  return COND_IS_SHARED(atomic_load_explicit(&state, memory_order_relaxed));  in process_shared()
   115  return COND_GET_CLOCK(atomic_load_explicit(&state, memory_order_relaxed)) == CLOCK_REALTIME;  in use_realtime_clock()
   149  atomic_store_explicit(&cond->state, 0xdeadc04d, memory_order_relaxed);  in pthread_cond_destroy()
   167  atomic_fetch_add_explicit(&cond->state, COND_COUNTER_STEP, memory_order_relaxed);  in __pthread_cond_pulse()
   180  unsigned int old_state = atomic_load_explicit(&cond->state, memory_order_relaxed);  in __pthread_cond_timedwait()
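The fetch_add at line 167 is the signal/broadcast "pulse": bump a generation counter in the state word so that waiters re-checking the state notice a wakeup was issued, then wake them via futex. A Linux-only sketch; the counter step and the claim that the waiter's mutex supplies the real ordering are assumptions:

    #include <linux/futex.h>
    #include <stdatomic.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    static _Atomic(unsigned int) cond_state;
    #define TOY_COUNTER_STEP 4u /* low bits assumed to hold clock/shared flags */

    /* thread_count is 1 for signal, INT_MAX for broadcast. */
    static void toy_cond_pulse(int thread_count) {
      /* The bump itself can be relaxed: waiters evaluate their predicate
       * under the mutex, which provides the ordering that matters. The
       * counter only has to *change* so a waiter's re-check of the state
       * word observes that a wakeup happened. */
      atomic_fetch_add_explicit(&cond_state, TOY_COUNTER_STEP,
                                memory_order_relaxed);
      syscall(SYS_futex, &cond_state, FUTEX_WAKE_PRIVATE, thread_count,
              NULL, NULL, 0);
    }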
semaphore.cpp
   101  return (atomic_load_explicit(sem_count_ptr, memory_order_relaxed) & SEMCOUNT_SHARED_MASK);  in SEM_GET_SHARED()
   145  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_dec()
   165  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_trydec()
   189  unsigned int old_value = atomic_load_explicit(sem_count_ptr, memory_order_relaxed);  in __sem_inc()
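semaphore.cpp repeats the load-then-CAS-loop shape: read the packed count with a relaxed load, then try to publish the new value, refreshing old_value on each failed attempt. A sketch that drops the SEMCOUNT packing (shared bit and shift) for clarity; the acquire/relaxed pairing here is an assumption, chosen to pair with a release in sem_post():

    #include <stdatomic.h>
    #include <stdbool.h>

    static _Atomic(unsigned int) sem_count;

    bool toy_sem_trydec(void) {
      unsigned int old_value =
          atomic_load_explicit(&sem_count, memory_order_relaxed);
      while (old_value > 0) {
        /* On success, acquire makes the poster's preceding writes visible.
         * On failure old_value has been refreshed, so just loop. */
        if (atomic_compare_exchange_weak_explicit(&sem_count, &old_value,
                                                  old_value - 1,
                                                  memory_order_acquire,
                                                  memory_order_relaxed)) {
          return true;
        }
      }
      return false; /* count was zero: sem_wait() would block on a futex here */
    }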
pthread_barrier.cpp
   124  uint32_t prev_wait_count = atomic_load_explicit(&barrier->wait_count, memory_order_relaxed);  in pthread_barrier_wait()
   137  memory_order_relaxed)) {  in pthread_barrier_wait()
   178  if (atomic_load_explicit(&barrier->wait_count, memory_order_relaxed) != 0) {  in pthread_barrier_destroy()
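Line 178 is a reminder that relaxed loads are also used where no synchronization is intended at all: pthread_barrier_destroy() just sanity-checks that nobody is still waiting, and a nonzero answer is already a caller bug rather than something to order against. A minimal sketch:

    #include <errno.h>
    #include <stdatomic.h>
    #include <stdint.h>

    static _Atomic(uint32_t) barrier_wait_count;

    int toy_barrier_destroy(void) {
      /* Relaxed: we are not acquiring anything, just detecting misuse. */
      if (atomic_load_explicit(&barrier_wait_count, memory_order_relaxed) != 0) {
        return EBUSY;
      }
      return 0;
    }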
system_properties.cpp
   386  uint_least32_t left_offset = atomic_load_explicit(&current->left, memory_order_relaxed);  in find_prop_bt()
   402  uint_least32_t right_offset = atomic_load_explicit(&current->right, memory_order_relaxed);  in find_prop_bt()
   438  uint_least32_t children_offset = atomic_load_explicit(&current->children, memory_order_relaxed);  in find_property()
   463  uint_least32_t prop_offset = atomic_load_explicit(&current->prop, memory_order_relaxed);  in find_property()
   658  uint_least32_t left_offset = atomic_load_explicit(&trie->left, memory_order_relaxed);  in foreach_property()
   663  uint_least32_t prop_offset = atomic_load_explicit(&trie->prop, memory_order_relaxed);  in foreach_property()
   669  uint_least32_t children_offset = atomic_load_explicit(&trie->children, memory_order_relaxed);  in foreach_property()
   674  uint_least32_t right_offset = atomic_load_explicit(&trie->right, memory_order_relaxed);  in foreach_property()
  1186  if (serial == load_const_atomic(&(pi->serial), memory_order_relaxed)) {  in __system_property_read()
  1218  if (serial == load_const_atomic(&(pi->serial), memory_order_relaxed)) {  in __system_property_read_callback()
  [all …]
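Two idioms coexist here. The trie walks (find_prop_bt, find_property, foreach_property) load child offsets relaxed because a node, once published by the writer with release semantics, never changes. The checks at lines 1186 and 1218 are the second idiom: a seqlock-style optimistic copy, where a relaxed re-load of the serial after an acquire fence confirms the copy was not torn. A sketch of the reader side under assumed field names; the writer's dirty-bit protocol is elided, and bionic's load_const_atomic() helper (which exists because C11 atomic_load rejects const-qualified objects) is replaced by a plain load:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <string.h>

    typedef struct {
      _Atomic(uint32_t) serial; /* bumped by the writer around each update */
      char value[92];
    } toy_prop_info;

    static uint32_t toy_property_read(toy_prop_info* pi, char* out) {
      for (;;) {
        uint32_t serial =
            atomic_load_explicit(&pi->serial, memory_order_acquire);
        /* Formally a data race in the C11 model if a writer is active;
         * seqlock readers rely on the retry below to discard torn copies. */
        memcpy(out, pi->value, sizeof(pi->value));
        /* The acquire fence keeps the relaxed re-load below from being
         * reordered before the memcpy. */
        atomic_thread_fence(memory_order_acquire);
        if (serial ==
            atomic_load_explicit(&pi->serial, memory_order_relaxed)) {
          return serial; /* consistent snapshot */
        }
        /* serial moved: a writer was active during the copy, so retry */
      }
    }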
/bionic/benchmarks/
atomic_benchmark.cpp
    56  result += test_loc.load(std::memory_order_relaxed);  in BM_load_relaxed()
    94  result += test_loc.fetch_add(1, std::memory_order_relaxed);  in BM_fetch_add_relaxed()
   117  result += test_loc.load(std::memory_order_relaxed);  in BM_acquire_fence()
   128  result += test_loc.load(std::memory_order_relaxed);  in BM_seq_cst_fence()
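These loops measure the baseline cost of the operations above: what a relaxed load or fetch_add costs by itself. A rough C analogue of BM_load_relaxed, timing by hand instead of the google-benchmark harness; accumulating into result is the same trick the benchmark uses to keep the compiler from deleting the loads:

    #include <stdatomic.h>
    #include <stdio.h>
    #include <time.h>

    static _Atomic(unsigned int) test_loc;

    int main(void) {
      const long kIters = 100000000L;
      unsigned int result = 0;
      struct timespec begin, end;
      clock_gettime(CLOCK_MONOTONIC, &begin);
      for (long i = 0; i < kIters; ++i) {
        result += atomic_load_explicit(&test_loc, memory_order_relaxed);
      }
      clock_gettime(CLOCK_MONOTONIC, &end);
      double ns = (end.tv_sec - begin.tv_sec) * 1e9 +
                  (end.tv_nsec - begin.tv_nsec);
      printf("%.2f ns per relaxed load (result=%u)\n",
             ns / (double)kIters, result);
      return 0;
    }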
/bionic/libc/private/
bionic_lock.h
    56  LockedWithoutWaiter, memory_order_acquire, memory_order_relaxed));  in trylock()
    62  LockedWithoutWaiter, memory_order_acquire, memory_order_relaxed))) {  in lock()
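The matches only show the ordering arguments, but a plausible reading of trylock() is a single-shot strong CAS with the same acquire/relaxed pairing. The strong/weak distinction matters in this shape, unlike the retry loops above. A sketch with the state names from the matches:

    #include <stdatomic.h>
    #include <stdbool.h>

    enum { Unlocked = 0, LockedWithoutWaiter = 1, LockedWithWaiter = 2 };

    static _Atomic(int) lock_state;

    bool toy_trylock(void) {
      int expected = Unlocked;
      /* Strong CAS: there is no loop here to absorb a spurious failure,
       * which would make trylock() report "busy" on a free lock. */
      return atomic_compare_exchange_strong_explicit(&lock_state, &expected,
                                                     LockedWithoutWaiter,
                                                     memory_order_acquire,
                                                     memory_order_relaxed);
    }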
/bionic/libc/include/
stdatomic.h
    87  using std::memory_order_relaxed;
   249  memory_order_relaxed = __ATOMIC_RELAXED,  (enumerator)
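These two matches are the C++ and C halves of the same compatibility header: built as C++, memory_order_relaxed is the std:: name pulled in with a using-declaration; built as C, it is an enumerator defined from the compiler's __ATOMIC_RELAXED builtin. Either way, caller code spells it identically, as in this small C sketch:

    #include <stdatomic.h>

    static _Atomic(int) counter;

    /* Compiles the same whether stdatomic.h resolved memory_order_relaxed
     * via 'using std::memory_order_relaxed' (C++) or as a C enumerator. */
    void bump(void) {
      atomic_fetch_add_explicit(&counter, 1, memory_order_relaxed);
    }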