/*
 * Copyright (C) 2019 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#pragma once

#include <threads.h>

#include <errno.h>
#include <sched.h>
#include <stdlib.h>

#if defined(__BIONIC_THREADS_INLINE)

__BEGIN_DECLS

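// C11 <threads.h> is implemented here as thin inline wrappers over the
// corresponding pthread primitives. This helper translates the errno-style
// codes returned by those pthread calls into the thrd_* result constants.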
static __inline int __bionic_thrd_error(int __pthread_code) {
  switch (__pthread_code) {
    case 0: return 0;
    case ENOMEM: return thrd_nomem;
    case ETIMEDOUT: return thrd_timedout;
    case EBUSY: return thrd_busy;
    default: return thrd_error;
  }
}

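// call_once delegates directly to pthread_once, which already provides the
// exactly-once semantics that C11 requires.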
__BIONIC_THREADS_INLINE void call_once(once_flag* __flag,
                                       void (*__function)(void)) {
  pthread_once(__flag, __function);
}



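// Condition variables: cnd_t is backed by pthread_cond_t, so each function
// forwards to its pthread counterpart and maps the return code.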
__BIONIC_THREADS_INLINE int cnd_broadcast(cnd_t* __cnd) {
  return __bionic_thrd_error(pthread_cond_broadcast(__cnd));
}

__BIONIC_THREADS_INLINE void cnd_destroy(cnd_t* __cnd) {
  pthread_cond_destroy(__cnd);
}

__BIONIC_THREADS_INLINE int cnd_init(cnd_t* __cnd) {
  return __bionic_thrd_error(pthread_cond_init(__cnd, NULL));
}

__BIONIC_THREADS_INLINE int cnd_signal(cnd_t* __cnd) {
  return __bionic_thrd_error(pthread_cond_signal(__cnd));
}

__BIONIC_THREADS_INLINE int cnd_timedwait(cnd_t* __cnd,
                                          mtx_t* __mtx,
                                          const struct timespec* __timeout) {
  return __bionic_thrd_error(pthread_cond_timedwait(__cnd, __mtx, __timeout));
}

__BIONIC_THREADS_INLINE int cnd_wait(cnd_t* __cnd, mtx_t* __mtx) {
  return __bionic_thrd_error(pthread_cond_wait(__cnd, __mtx));
}



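// Mutexes: mtx_init maps the C11 type flags onto pthread mutex attributes.
// mtx_recursive selects PTHREAD_MUTEX_RECURSIVE; mtx_plain and mtx_timed both
// use PTHREAD_MUTEX_NORMAL, and any other flag combination is rejected with
// thrd_error.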
__BIONIC_THREADS_INLINE void mtx_destroy(mtx_t* __mtx) {
  pthread_mutex_destroy(__mtx);
}

__BIONIC_THREADS_INLINE int mtx_init(mtx_t* __mtx, int __type) {
  int __pthread_type = (__type & mtx_recursive) ? PTHREAD_MUTEX_RECURSIVE
                                                : PTHREAD_MUTEX_NORMAL;
  __type &= ~mtx_recursive;
  if (__type != mtx_plain && __type != mtx_timed) return thrd_error;

  pthread_mutexattr_t __attr;
  pthread_mutexattr_init(&__attr);
  pthread_mutexattr_settype(&__attr, __pthread_type);
  return __bionic_thrd_error(pthread_mutex_init(__mtx, &__attr));
}

__BIONIC_THREADS_INLINE int mtx_lock(mtx_t* __mtx) {
  return __bionic_thrd_error(pthread_mutex_lock(__mtx));
}

__BIONIC_THREADS_INLINE int mtx_timedlock(mtx_t* __mtx,
                                          const struct timespec* __timeout) {
  return __bionic_thrd_error(pthread_mutex_timedlock(__mtx, __timeout));
}

__BIONIC_THREADS_INLINE int mtx_trylock(mtx_t* __mtx) {
  return __bionic_thrd_error(pthread_mutex_trylock(__mtx));
}

__BIONIC_THREADS_INLINE int mtx_unlock(mtx_t* __mtx) {
  return __bionic_thrd_error(pthread_mutex_unlock(__mtx));
}



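// Thread creation: pthread start routines return void* while thrd_start_t
// returns int, so __bionic_thrd_trampoline unpacks the heap-allocated
// __bionic_thrd_data, runs the user's function, and returns its int result
// packed into the pointer-sized return value (thrd_join reverses the cast).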
struct __bionic_thrd_data {
  thrd_start_t __func;
  void* __arg;
};

static inline void* __bionic_thrd_trampoline(void* __arg) {
  struct __bionic_thrd_data __data =
      *__BIONIC_CAST(static_cast, struct __bionic_thrd_data*, __arg);
  free(__arg);
  int __result = __data.__func(__data.__arg);
  return __BIONIC_CAST(reinterpret_cast, void*,
                       __BIONIC_CAST(static_cast, uintptr_t, __result));
}

__BIONIC_THREADS_INLINE int thrd_create(thrd_t* __thrd,
                                        thrd_start_t __func,
                                        void* __arg) {
  struct __bionic_thrd_data* __pthread_arg =
      __BIONIC_CAST(static_cast, struct __bionic_thrd_data*,
                    malloc(sizeof(struct __bionic_thrd_data)));
  // Guard against allocation failure before filling in the trampoline data.
  if (__pthread_arg == NULL) return thrd_nomem;
  __pthread_arg->__func = __func;
  __pthread_arg->__arg = __arg;
  int __result = __bionic_thrd_error(pthread_create(__thrd, NULL,
                                                    __bionic_thrd_trampoline,
                                                    __pthread_arg));
  if (__result != thrd_success) free(__pthread_arg);
  return __result;
}

__BIONIC_THREADS_INLINE thrd_t thrd_current(void) {
  return pthread_self();
}

__BIONIC_THREADS_INLINE int thrd_detach(thrd_t __thrd) {
  return __bionic_thrd_error(pthread_detach(__thrd));
}

__BIONIC_THREADS_INLINE int thrd_equal(thrd_t __lhs, thrd_t __rhs) {
  return pthread_equal(__lhs, __rhs);
}

__BIONIC_THREADS_INLINE void thrd_exit(int __result) {
  pthread_exit(__BIONIC_CAST(reinterpret_cast, void*,
                             __BIONIC_CAST(static_cast, uintptr_t, __result)));
}

__BIONIC_THREADS_INLINE int thrd_join(thrd_t __thrd, int* __result) {
  void* __pthread_result;
  if (pthread_join(__thrd, &__pthread_result) != 0) return thrd_error;
  if (__result) {
    *__result = __BIONIC_CAST(reinterpret_cast, intptr_t, __pthread_result);
  }
  return thrd_success;
}

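// A minimal usage sketch (illustrative only; work() and demo() are not part
// of this header): thrd_create forwards the int returned by work() through
// the trampoline above, and thrd_join recovers it.
//
//   static int work(void* arg) {
//     return *(int*)arg + 1;
//   }
//
//   static int demo(void) {
//     int in = 41, out = 0;
//     thrd_t t;
//     if (thrd_create(&t, work, &in) != thrd_success) return -1;
//     if (thrd_join(t, &out) != thrd_success) return -1;
//     return out;  // 42
//   }

// thrd_sleep follows the C11 return convention: 0 on success, -1 if the sleep
// was interrupted by a signal, and another negative value (-2 here) for any
// other error.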
__BIONIC_THREADS_INLINE int thrd_sleep(const struct timespec* __duration,
                                       struct timespec* __remaining) {
  int __rc = nanosleep(__duration, __remaining);
  if (__rc == 0) return 0;
  return (errno == EINTR) ? -1 : -2;
}

__BIONIC_THREADS_INLINE void thrd_yield(void) {
  sched_yield();
}



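// Thread-specific storage: tss_t is backed by pthread_key_t, so these calls
// forward to pthread_key_create/pthread_key_delete and
// pthread_getspecific/pthread_setspecific; destructors registered via
// tss_create run at thread exit, as with pthread keys.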
__BIONIC_THREADS_INLINE int tss_create(tss_t* __key, tss_dtor_t __dtor) {
  return __bionic_thrd_error(pthread_key_create(__key, __dtor));
}

__BIONIC_THREADS_INLINE void tss_delete(tss_t __key) {
  pthread_key_delete(__key);
}

__BIONIC_THREADS_INLINE void* tss_get(tss_t __key) {
  return pthread_getspecific(__key);
}

__BIONIC_THREADS_INLINE int tss_set(tss_t __key, void* __value) {
  return __bionic_thrd_error(pthread_setspecific(__key, __value));
}

__END_DECLS

#endif  // __BIONIC_THREADS_INLINE