/*
 * z_Windows_NT-586_util.cpp -- platform specific routines.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
13 #include "kmp.h"
14
15 #if (KMP_ARCH_X86 || KMP_ARCH_X86_64)
16 /* Only 32-bit "add-exchange" instruction on IA-32 architecture causes us to
17 use compare_and_store for these routines */
18
/* Atomically OR the bits of d into the byte at *p.
   Emulated with a compare-and-store retry loop (IA-32 has no 8-bit
   fetch-and-or). Returns the value of *p observed just before the
   successful update. */
kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 d) {
  kmp_int8 observed = TCR_1(*p);

  while (!__kmp_compare_and_store8(p, observed, (kmp_int8)(observed | d))) {
    KMP_CPU_PAUSE(); // back off briefly before retrying the CAS
    observed = TCR_1(*p);
  }
  return observed;
}
32
/* Atomically AND the byte at *p with mask d via a compare-and-store
   retry loop; returns the pre-update value of *p. */
kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 d) {
  for (;;) {
    kmp_int8 prev = TCR_1(*p);
    kmp_int8 masked = (kmp_int8)(prev & d);
    if (__kmp_compare_and_store8(p, prev, masked))
      return prev;
    KMP_CPU_PAUSE(); // contention: pause, then re-read and retry
  }
}
46
/* Atomically OR d into the 32-bit word at *p using a compare-and-store
   retry loop; returns the value seen immediately before the update. */
kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 d) {
  kmp_uint32 seen = TCR_4(*p);

  // The CAS primitive takes a signed pointer; the bit pattern is what
  // matters here, so the cast is benign.
  while (!__kmp_compare_and_store32((volatile kmp_int32 *)p, seen, seen | d)) {
    KMP_CPU_PAUSE();
    seen = TCR_4(*p);
  }
  return seen;
}
61
/* Atomically AND the 32-bit word at *p with mask d via compare-and-store;
   returns the pre-update value. */
kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 d) {
  kmp_uint32 prev;
  for (;;) {
    prev = TCR_4(*p);
    if (__kmp_compare_and_store32((volatile kmp_int32 *)p, prev, prev & d))
      break;
    KMP_CPU_PAUSE(); // lost the race; pause before retrying
  }
  return prev;
}
76
/* Atomically add d to the byte at *p and return the prior value.
   Emulated with a compare-and-store retry loop (IA-32 has no 8-bit
   add-exchange). Locals are kmp_int8 — matching the CAS operand width
   and the other 8-bit routines above — rather than kmp_int64, which
   was silently narrowed at every __kmp_compare_and_store8 call. */
kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 d) {
  kmp_int8 old_value, new_value;

  old_value = TCR_1(*p);
  new_value = old_value + d;
  while (!__kmp_compare_and_store8(p, old_value, new_value)) {
    KMP_CPU_PAUSE();
    old_value = TCR_1(*p);
    new_value = old_value + d;
  }
  return old_value;
}
89
90 #if KMP_ARCH_X86
/* Atomically add d to the 64-bit value at *p (32-bit x86 only, where no
   native 64-bit fetch-and-add is available); returns the prior value. */
kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 d) {
  kmp_int64 snapshot = TCR_8(*p);

  while (!__kmp_compare_and_store64(p, snapshot, snapshot + d)) {
    KMP_CPU_PAUSE(); // another thread updated *p first; retry
    snapshot = TCR_8(*p);
  }
  return snapshot;
}
103 #endif /* KMP_ARCH_X86 */
104
/* Atomically OR d into the 64-bit word at *p via a compare-and-store
   retry loop; returns the value observed just before the update. */
kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 d) {
  kmp_uint64 prev;
  for (;;) {
    prev = TCR_8(*p);
    // CAS primitive is declared on signed 64-bit words; cast the pointer.
    if (__kmp_compare_and_store64((volatile kmp_int64 *)p, prev, prev | d))
      break;
    KMP_CPU_PAUSE();
  }
  return prev;
}
119
/* Atomically AND the 64-bit word at *p with mask d; returns the
   pre-update value. Implemented as a compare-and-store retry loop. */
kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 d) {
  kmp_uint64 current = TCR_8(*p);

  while (!__kmp_compare_and_store64((volatile kmp_int64 *)p, current,
                                    current & d)) {
    KMP_CPU_PAUSE(); // contention detected; pause, re-read, retry
    current = TCR_8(*p);
  }
  return current;
}
134
135 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
136