/*
 * Copyright (C) 2019 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#pragma once

#include <threads.h>

#include <errno.h>
#include <sched.h>
#include <stdlib.h>

#if defined(__BIONIC_THREADS_INLINE)

__BEGIN_DECLS

// Maps a pthread/errno-style error code onto the corresponding C11 thrd_* result.
static __inline int __bionic_thrd_error(int __pthread_code) {
  switch (__pthread_code) {
    case 0: return 0;
    case ENOMEM: return thrd_nomem;
    case ETIMEDOUT: return thrd_timedout;
    case EBUSY: return thrd_busy;
    default: return thrd_error;
  }
}

__BIONIC_THREADS_INLINE void call_once(once_flag* _Nonnull __flag,
                                       void (* _Nonnull __function)(void)) {
  pthread_once(__flag, __function);
}
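/*
 * Usage sketch (illustrative; the flag and init routine names are
 * hypothetical): a once_flag must be statically initialized with
 * ONCE_FLAG_INIT, after which call_once() runs the routine exactly once no
 * matter how many threads reach it.
 *
 *   static once_flag __example_once = ONCE_FLAG_INIT;
 *   static void __example_init(void) { ... }
 *
 *   call_once(&__example_once, __example_init);
 */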


__BIONIC_THREADS_INLINE int cnd_broadcast(cnd_t* _Nonnull __cnd) {
  return __bionic_thrd_error(pthread_cond_broadcast(__cnd));
}

__BIONIC_THREADS_INLINE void cnd_destroy(cnd_t* _Nonnull __cnd) {
  pthread_cond_destroy(__cnd);
}

__BIONIC_THREADS_INLINE int cnd_init(cnd_t* _Nonnull __cnd) {
  return __bionic_thrd_error(pthread_cond_init(__cnd, NULL));
}

__BIONIC_THREADS_INLINE int cnd_signal(cnd_t* _Nonnull __cnd) {
  return __bionic_thrd_error(pthread_cond_signal(__cnd));
}

__BIONIC_THREADS_INLINE int cnd_timedwait(cnd_t* _Nonnull __cnd,
                                          mtx_t* _Nonnull __mtx,
                                          const struct timespec* _Nullable __timeout) {
  return __bionic_thrd_error(pthread_cond_timedwait(__cnd, __mtx, __timeout));
}

__BIONIC_THREADS_INLINE int cnd_wait(cnd_t* _Nonnull __cnd, mtx_t* _Nonnull __mtx) {
  return __bionic_thrd_error(pthread_cond_wait(__cnd, __mtx));
}
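/*
 * Usage sketch (illustrative; the mutex, condition, and flag names are
 * hypothetical): like their pthread counterparts, C11 condition variables are
 * waited on in a loop with the associated mutex held, because cnd_wait() can
 * wake spuriously.
 *
 *   // Consumer side:
 *   mtx_lock(&__example_mtx);
 *   while (!__example_ready) cnd_wait(&__example_cnd, &__example_mtx);
 *   mtx_unlock(&__example_mtx);
 *
 *   // Producer side:
 *   mtx_lock(&__example_mtx);
 *   __example_ready = 1;
 *   cnd_signal(&__example_cnd);
 *   mtx_unlock(&__example_mtx);
 */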


__BIONIC_THREADS_INLINE void mtx_destroy(mtx_t* _Nonnull __mtx) {
  pthread_mutex_destroy(__mtx);
}

__BIONIC_THREADS_INLINE int mtx_init(mtx_t* _Nonnull __mtx, int __type) {
  // Only mtx_recursive changes the underlying pthread mutex type;
  // mtx_plain and mtx_timed both map to PTHREAD_MUTEX_NORMAL.
  int __pthread_type = (__type & mtx_recursive) ? PTHREAD_MUTEX_RECURSIVE
                                                : PTHREAD_MUTEX_NORMAL;
  __type &= ~mtx_recursive;
  if (__type != mtx_plain && __type != mtx_timed) return thrd_error;

  pthread_mutexattr_t __attr;
  pthread_mutexattr_init(&__attr);
  pthread_mutexattr_settype(&__attr, __pthread_type);
  return __bionic_thrd_error(pthread_mutex_init(__mtx, &__attr));
}

__BIONIC_THREADS_INLINE int mtx_lock(mtx_t* _Nonnull __mtx) {
  return __bionic_thrd_error(pthread_mutex_lock(__mtx));
}

__BIONIC_THREADS_INLINE int mtx_timedlock(mtx_t* _Nonnull __mtx,
                                          const struct timespec* _Nullable __timeout) {
  return __bionic_thrd_error(pthread_mutex_timedlock(__mtx, __timeout));
}

__BIONIC_THREADS_INLINE int mtx_trylock(mtx_t* _Nonnull __mtx) {
  return __bionic_thrd_error(pthread_mutex_trylock(__mtx));
}

__BIONIC_THREADS_INLINE int mtx_unlock(mtx_t* _Nonnull __mtx) {
  return __bionic_thrd_error(pthread_mutex_unlock(__mtx));
}
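/*
 * Usage sketch (illustrative; the mutex name is hypothetical): mtx_init()
 * accepts mtx_plain or mtx_timed, either optionally combined with
 * mtx_recursive; any other type fails with thrd_error.
 *
 *   mtx_t __example_mtx;
 *   if (mtx_init(&__example_mtx, mtx_plain) != thrd_success) abort();
 *   mtx_lock(&__example_mtx);
 *   // ... critical section ...
 *   mtx_unlock(&__example_mtx);
 *   mtx_destroy(&__example_mtx);
 */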

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-completeness"
// Heap-allocated shim carrying a C11 start routine and its argument across
// pthread_create(), which expects a void* (*)(void*) start routine instead.
struct __bionic_thrd_data {
  thrd_start_t __func;
  void* __arg;
};
#pragma clang diagnostic pop

static __inline void* _Nonnull __bionic_thrd_trampoline(void* _Nonnull __arg) {
  struct __bionic_thrd_data __data =
      *__BIONIC_CAST(static_cast, struct __bionic_thrd_data*, __arg);
  free(__arg);
  int __result = __data.__func(__data.__arg);
  // Pack the int result into the void* that pthread_join() will see.
  return __BIONIC_CAST(reinterpret_cast, void*,
                       __BIONIC_CAST(static_cast, uintptr_t, __result));
}

__BIONIC_THREADS_INLINE int thrd_create(thrd_t* _Nonnull __thrd,
                                        thrd_start_t _Nonnull __func,
                                        void* _Nullable __arg) {
  struct __bionic_thrd_data* __pthread_arg =
      __BIONIC_CAST(static_cast, struct __bionic_thrd_data*,
                    malloc(sizeof(struct __bionic_thrd_data)));
  if (__pthread_arg == NULL) return thrd_nomem;
  __pthread_arg->__func = __func;
  __pthread_arg->__arg = __arg;
  int __result = __bionic_thrd_error(pthread_create(__thrd, NULL,
                                                    __bionic_thrd_trampoline,
                                                    __pthread_arg));
  // On failure the trampoline never runs, so free the shim here.
  if (__result != thrd_success) free(__pthread_arg);
  return __result;
}
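/*
 * Usage sketch (illustrative; the start routine name is hypothetical): a
 * thrd_start_t routine returns an int, which thrd_join() can recover.
 *
 *   static int __example_start(void* __arg) { return 42; }
 *
 *   thrd_t __t;
 *   int __status;
 *   if (thrd_create(&__t, __example_start, NULL) == thrd_success &&
 *       thrd_join(__t, &__status) == thrd_success) {
 *     // __status is now 42.
 *   }
 */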

__BIONIC_THREADS_INLINE thrd_t thrd_current(void) {
  return pthread_self();
}

__BIONIC_THREADS_INLINE int thrd_detach(thrd_t __thrd) {
  return __bionic_thrd_error(pthread_detach(__thrd));
}

__BIONIC_THREADS_INLINE int thrd_equal(thrd_t __lhs, thrd_t __rhs) {
  return pthread_equal(__lhs, __rhs);
}

__BIONIC_THREADS_INLINE void thrd_exit(int __result) {
  pthread_exit(__BIONIC_CAST(reinterpret_cast, void*,
                             __BIONIC_CAST(static_cast, uintptr_t, __result)));
}

__BIONIC_THREADS_INLINE int thrd_join(thrd_t __thrd, int* _Nullable __result) {
  void* __pthread_result;
  if (pthread_join(__thrd, &__pthread_result) != 0) return thrd_error;
  if (__result) {
    *__result = __BIONIC_CAST(reinterpret_cast, intptr_t, __pthread_result);
  }
  return thrd_success;
}

__BIONIC_THREADS_INLINE int thrd_sleep(const struct timespec* _Nonnull __duration,
                                       struct timespec* _Nullable __remaining) {
  // C11 requires 0 on success, -1 if interrupted by a signal, and some other
  // negative value for any other failure.
  int __rc = nanosleep(__duration, __remaining);
  if (__rc == 0) return 0;
  return (errno == EINTR) ? -1 : -2;
}
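/*
 * Usage sketch (illustrative): sleep for roughly 100ms, retrying if a signal
 * interrupts the sleep before the full duration has elapsed. The remaining
 * time is written back into the same timespec so the retry picks up where
 * the interrupted sleep left off.
 *
 *   struct timespec __delay = { .tv_sec = 0, .tv_nsec = 100000000 };
 *   while (thrd_sleep(&__delay, &__delay) == -1) {}
 */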

__BIONIC_THREADS_INLINE void thrd_yield(void) {
  sched_yield();
}



__BIONIC_THREADS_INLINE int tss_create(tss_t* _Nonnull __key, tss_dtor_t _Nullable __dtor) {
  return __bionic_thrd_error(pthread_key_create(__key, __dtor));
}

__BIONIC_THREADS_INLINE void tss_delete(tss_t __key) {
  pthread_key_delete(__key);
}

__BIONIC_THREADS_INLINE void* _Nullable tss_get(tss_t __key) {
  return pthread_getspecific(__key);
}

__BIONIC_THREADS_INLINE int tss_set(tss_t __key, void* _Nonnull __value) {
  return __bionic_thrd_error(pthread_setspecific(__key, __value));
}
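/*
 * Usage sketch (illustrative; the key name is hypothetical): thread-specific
 * storage keys behave like pthread keys, and the destructor runs at thread
 * exit for any thread whose stored value is non-null.
 *
 *   static tss_t __example_key;
 *
 *   tss_create(&__example_key, free);
 *   tss_set(__example_key, malloc(128));
 *   void* __value = tss_get(__example_key);
 */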

__END_DECLS

#endif  // __BIONIC_THREADS_INLINE