// Copyright 2015-2019 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Replacement for the GCC built-in atomic functions

#include "sdkconfig.h"
#include <stdbool.h>
#include <stdint.h>
#include "xtensa/config/core-isa.h"
#include "xtensa/xtruntime.h"

// Hooks reserved for measuring the time spent in atomic operations (no-ops by default)
#define atomic_benchmark_intr_disable()
#define atomic_benchmark_intr_restore(STATE)

// These macros disable and restore interrupts via a local variable or the stack,
// so calls can be nested and can be made from interrupt handlers as well.
// WARNING: they only apply to the current CPU.
#define _ATOMIC_ENTER_CRITICAL() ({ \
    unsigned state = XTOS_SET_INTLEVEL(XCHAL_EXCM_LEVEL); \
    atomic_benchmark_intr_disable(); \
    state; \
})

#define _ATOMIC_EXIT_CRITICAL(state) do { \
    atomic_benchmark_intr_restore(state); \
    XTOS_RESTORE_JUST_INTLEVEL(state); \
} while (0)

#define ATOMIC_EXCHANGE(n, type) type __atomic_exchange_ ## n (type* mem, type val, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *mem; \
    *mem = val; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define CMP_EXCHANGE(n, type) bool __atomic_compare_exchange_ ## n (type* mem, type* expect, type desired, int success, int failure) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*mem == *expect) { \
        ret = true; \
        *mem = desired; \
    } else { \
        *expect = *mem; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_ADD(n, type) type __atomic_fetch_add_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr + value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_SUB(n, type) type __atomic_fetch_sub_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr - value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_AND(n, type) type __atomic_fetch_and_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr & value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_OR(n, type) type __atomic_fetch_or_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr | value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define FETCH_XOR(n, type) type __atomic_fetch_xor_ ## n (type* ptr, type value, int memorder) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    *ptr = *ptr ^ value; \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}
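
// Illustration (kept out of the build by #if 0): on cores without native atomic
// support, GCC lowers C11 <stdatomic.h> operations into calls to the
// size-suffixed functions generated above. A minimal sketch, assuming a
// hypothetical shared counter; the names below are illustrative only.
#if 0
#include <stdatomic.h>

static atomic_uint s_event_count;   // hypothetical 32-bit shared counter

static void example_count_event(void)
{
    // Without S32C1I, this compiles to a call to
    // __atomic_fetch_add_4(&s_event_count, 1, __ATOMIC_SEQ_CST).
    atomic_fetch_add(&s_event_count, 1);
}
#endif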

#define SYNC_FETCH_OP(op, n, type) type __sync_fetch_and_ ## op ## _ ## n (type* ptr, type value, ...) \
{ \
    return __atomic_fetch_ ## op ## _ ## n (ptr, value, __ATOMIC_SEQ_CST); \
}

#define SYNC_BOOL_CMP_EXCHANGE(n, type) bool __sync_bool_compare_and_swap_ ## n (type *ptr, type oldval, type newval, ...) \
{ \
    bool ret = false; \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    if (*ptr == oldval) { \
        *ptr = newval; \
        ret = true; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#define SYNC_VAL_CMP_EXCHANGE(n, type) type __sync_val_compare_and_swap_ ## n (type *ptr, type oldval, type newval, ...) \
{ \
    unsigned state = _ATOMIC_ENTER_CRITICAL(); \
    type ret = *ptr; \
    if (*ptr == oldval) { \
        *ptr = newval; \
    } \
    _ATOMIC_EXIT_CRITICAL(state); \
    return ret; \
}

#ifndef XCHAL_HAVE_S32C1I
#error "XCHAL_HAVE_S32C1I not defined, include the correct header!"
#endif

// This code should only be compiled if the CPU doesn't support the atomic
// compare-and-swap instruction (S32C1I).
#if XCHAL_HAVE_S32C1I == 0

// The definitions below don't exactly match the prototypes GCC expects for
// its built-ins, so silence the resulting warning.
#pragma GCC diagnostic ignored "-Wbuiltin-declaration-mismatch"

ATOMIC_EXCHANGE(1, uint8_t)
ATOMIC_EXCHANGE(2, uint16_t)
ATOMIC_EXCHANGE(4, uint32_t)
ATOMIC_EXCHANGE(8, uint64_t)

CMP_EXCHANGE(1, uint8_t)
CMP_EXCHANGE(2, uint16_t)
CMP_EXCHANGE(4, uint32_t)
CMP_EXCHANGE(8, uint64_t)

FETCH_ADD(1, uint8_t)
FETCH_ADD(2, uint16_t)
FETCH_ADD(4, uint32_t)
FETCH_ADD(8, uint64_t)

FETCH_SUB(1, uint8_t)
FETCH_SUB(2, uint16_t)
FETCH_SUB(4, uint32_t)
FETCH_SUB(8, uint64_t)

FETCH_AND(1, uint8_t)
FETCH_AND(2, uint16_t)
FETCH_AND(4, uint32_t)
FETCH_AND(8, uint64_t)

FETCH_OR(1, uint8_t)
FETCH_OR(2, uint16_t)
FETCH_OR(4, uint32_t)
FETCH_OR(8, uint64_t)

FETCH_XOR(1, uint8_t)
FETCH_XOR(2, uint16_t)
FETCH_XOR(4, uint32_t)
FETCH_XOR(8, uint64_t)

SYNC_FETCH_OP(add, 1, uint8_t)
SYNC_FETCH_OP(add, 2, uint16_t)
SYNC_FETCH_OP(add, 4, uint32_t)
SYNC_FETCH_OP(add, 8, uint64_t)

SYNC_FETCH_OP(sub, 1, uint8_t)
SYNC_FETCH_OP(sub, 2, uint16_t)
SYNC_FETCH_OP(sub, 4, uint32_t)
SYNC_FETCH_OP(sub, 8, uint64_t)

SYNC_FETCH_OP(and, 1, uint8_t)
SYNC_FETCH_OP(and, 2, uint16_t)
SYNC_FETCH_OP(and, 4, uint32_t)
SYNC_FETCH_OP(and, 8, uint64_t)

SYNC_FETCH_OP(or, 1, uint8_t)
SYNC_FETCH_OP(or, 2, uint16_t)
SYNC_FETCH_OP(or, 4, uint32_t)
SYNC_FETCH_OP(or, 8, uint64_t)

SYNC_FETCH_OP(xor, 1, uint8_t)
SYNC_FETCH_OP(xor, 2, uint16_t)
SYNC_FETCH_OP(xor, 4, uint32_t)
SYNC_FETCH_OP(xor, 8, uint64_t)

SYNC_BOOL_CMP_EXCHANGE(1, uint8_t)
SYNC_BOOL_CMP_EXCHANGE(2, uint16_t)
SYNC_BOOL_CMP_EXCHANGE(4, uint32_t)
SYNC_BOOL_CMP_EXCHANGE(8, uint64_t)

SYNC_VAL_CMP_EXCHANGE(1, uint8_t)
SYNC_VAL_CMP_EXCHANGE(2, uint16_t)
SYNC_VAL_CMP_EXCHANGE(4, uint32_t)
SYNC_VAL_CMP_EXCHANGE(8, uint64_t)

#endif // XCHAL_HAVE_S32C1I == 0
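
// Illustration (kept out of the build by #if 0): the legacy __sync_* built-ins
// defined above all use sequentially consistent semantics. A minimal sketch of
// a test-and-set style lock built on them; the lock variable and function
// names are hypothetical.
#if 0
static volatile uint32_t s_lock;    // hypothetical lock word, 0 = free

static void example_lock(void)
{
    // Resolves to __sync_bool_compare_and_swap_4() on this target.
    while (!__sync_bool_compare_and_swap(&s_lock, 0, 1)) {
        // spin until the current holder stores 0 back
    }
}

static void example_unlock(void)
{
    s_lock = 0;
}
#endif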