/*
 * Copyright (c) 2023 Institute of Parallel And Distributed Systems (IPADS), Shanghai Jiao Tong University (SJTU)
 * Licensed under the Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *     http://license.coscl.org.cn/MulanPSL2
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
 * PURPOSE.
 * See the Mulan PSL v2 for more details.
 */
12 #include <common/types.h>
13 #include <common/errno.h>
14 #include <common/macro.h>
15 #include <common/lock.h>
16 #include <common/kprint.h>
17 #include <arch/sync.h>
18
19 /* Simple/compact RWLock from linux kernel */
20
rwlock_init(struct rwlock * rwlock)21 int rwlock_init(struct rwlock *rwlock)
22 {
23 if (rwlock == 0)
24 return -EINVAL;
25 rwlock->lock = 0;
26 return 0;
27 }
28
29 // clang-format off
30 /* WARN: when there are more than 0x7FFFFFFF readers exist, this function
31 * will not function correctly */
/*
 * read_lock - acquire @rwlock for shared (reader) access, spinning
 * until no writer holds it.
 *
 * Layout: bits [30:0] of rwlock->lock hold the reader count, bit 31 is
 * the writer flag (0x80000000).  The LL/SC (ldaxr/stxr) loop atomically
 * increments the reader count:
 *   - tbnz restarts the loop while bit 31 is set after the increment
 *     (a writer currently holds the lock);
 *   - cbnz restarts when the store-exclusive lost the race to another
 *     CPU touching the same word.
 * ldaxr has acquire semantics, so accesses inside the critical section
 * cannot be reordered before the lock is taken.
 */
void read_lock(struct rwlock *rwlock)
{
        unsigned int tmp, tmp2;
        asm volatile(
        "1: ldaxr %w0, %2\n"    /* tmp = lock (load-acquire exclusive) */
        " add %w0, %w0, #1\n"   /* one more reader */
        " tbnz %w0, #31, 1b\n"  /* writer bit set -> spin */
        " stxr %w1, %w0, %2\n"  /* tmp2 = 0 iff store-exclusive succeeded */
        " cbnz %w1, 1b\n"       /* lost the LL/SC race -> retry */
        : "=&r" (tmp), "=&r" (tmp2), "+Q" (rwlock->lock)
        :
        : "cc", "memory");
}
45
read_try_lock(struct rwlock * rwlock)46 int read_try_lock(struct rwlock *rwlock)
47 {
48 unsigned int tmp, tmp2;
49
50 asm volatile(
51 /* w1 = 1: stxr failed */
52 " mov %w1, #1\n"
53 "1: ldaxr %w0, %2\n"
54 " add %w0, %w0, #1\n"
55 /* 31 bit has been set => writer */
56 " tbnz %w0, #31, 2f\n"
57 " stxr %w1, %w0, %2\n"
58 "2:"
59 : "=&r" (tmp), "=&r" (tmp2), "+Q" (rwlock->lock)
60 :
61 : "cc", "memory");
62
63 /* Fail: tmp2 > 0 (stxr fail tmp2 = 1) return -1 */
64 return tmp2? -1: 0;
65 }
66
/*
 * read_unlock - drop one shared (reader) hold on @rwlock.
 *
 * Atomically decrements the reader count with an LL/SC loop.  The
 * store side uses stlxr (store-release exclusive), so all accesses in
 * the critical section complete before the count is seen to drop; the
 * plain ldxr is sufficient since no acquire ordering is needed here.
 */
void read_unlock(struct rwlock *rwlock)
{
        unsigned int tmp, tmp2;

        asm volatile(
        "1: ldxr %w0, %2\n"     /* tmp = lock */
        " sub %w0, %w0, #1\n"   /* one fewer reader */
        " stlxr %w1, %w0, %2\n" /* release-store; tmp2 = 0 iff it stuck */
        " cbnz %w1, 1b"         /* lost the LL/SC race -> retry */
        : "=&r" (tmp), "=&r" (tmp2), "+Q" (rwlock->lock)
        :
        : "memory");
}
80
81
82 /* Writer lock, use the 31 bit of rwlock->lock (0x80000000) */
83
/*
 * write_lock - acquire @rwlock exclusively, spinning until there are
 * no readers and no writer.
 *
 * Spins (cbnz after ldaxr) while the lock word is non-zero, i.e. while
 * any reader count remains or another writer holds bit 31.  Once the
 * word reads 0, stxr attempts to install 0x80000000 (the writer flag);
 * a failed store-exclusive restarts the whole sequence.  ldaxr gives
 * acquire semantics for the critical section.
 */
void write_lock(struct rwlock *rwlock)
{
        unsigned int tmp;

        asm volatile(
        "1: ldaxr %w0, %1\n"    /* tmp = lock (load-acquire exclusive) */
        " cbnz %w0, 1b\n"       /* readers or writer present -> spin */
        " stxr %w0, %w2, %1\n"  /* try to set the writer bit */
        " cbnz %w0, 1b\n"       /* lost the LL/SC race -> retry */
        : "=&r" (tmp), "+Q" (rwlock->lock)
        : "r" (0x80000000)      /* writer flag: bit 31 */
        : "memory");
}
97
98 /* Writer trylock, use the 31 bit of rwlock->lock (0x80000000) */
99
write_try_lock(struct rwlock * rwlock)100 int write_try_lock(struct rwlock *rwlock)
101 {
102 unsigned int tmp;
103
104 asm volatile(
105 "1: ldaxr %w0, %1\n"
106 " cbnz %w0, 2f\n"
107 " stxr %w0, %w2, %1\n"
108 "2:"
109 : "=&r" (tmp), "+Q" (rwlock->lock)
110 : "r" (0x80000000)
111 : "memory");
112
113 /* Fail: tmp > 0 (stxr fail tmp = 1) return -1 */
114 return tmp? -1: 0;
115 }
116
117
118 /* Writer unlock, set the rwlock to zero */
119
/*
 * write_unlock - release exclusive (writer) ownership of @rwlock.
 *
 * A single stlr (store-release) of zero clears the writer bit and the
 * whole lock word at once; no LL/SC loop is needed because the writer
 * owns the lock exclusively.  The release ordering ensures all
 * critical-section stores are visible before the lock appears free.
 */
void write_unlock(struct rwlock *rwlock)
{
        asm volatile(
        "stlr wzr, %0"
        : "=Q" (rwlock->lock) :: "memory");
}
126
// clang-format on
128