#ifndef __ASM_METAG_GLOBAL_LOCK_H
#define __ASM_METAG_GLOBAL_LOCK_H

#include <asm/metag_mem.h>

/**
 * __global_lock1() - Acquire global voluntary lock (LOCK1).
 * @flags:	Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock (LOCK1), also taking care to disable
 * all triggers so we cannot be interrupted, and to enforce a compiler barrier
 * so that the compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to acquire the voluntary or exclusive
 * locks until the voluntary lock is released with @__global_unlock1, but they
 * may continue to execute as long as they aren't trying to acquire either of
 * the locks.
 */
#define __global_lock1(flags) do {					\
	unsigned int __trval;						\
	asm volatile("MOV	%0,#0\n\t"				\
		     "SWAP	%0,TXMASKI\n\t"				\
		     "LOCK1"						\
		     : "=r" (__trval)					\
		     :							\
		     : "memory");					\
	(flags) = __trval;						\
} while (0)
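
/*
 * Implementation note (editorial, not in the original source): the
 * MOV/SWAP pair atomically exchanges #0 into TXMASKI, masking all
 * triggers before LOCK1 is requested; the previous trigger mask ends up
 * in @flags so that __global_unlock1() can restore it.
 */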

/**
 * __global_unlock1() - Release global voluntary lock (LOCK1).
 * @flags:	Variable to restore flags from.
 *
 * Releases the Meta global voluntary lock (LOCK1) acquired with
 * @__global_lock1, also taking care to re-enable triggers, and to enforce a
 * compiler barrier so that the compiler cannot reorder memory accesses across
 * the unlock.
 *
 * This immediately allows another hardware thread to acquire the voluntary or
 * exclusive locks.
 */
#define __global_unlock1(flags) do {					\
	unsigned int __trval = (flags);					\
	asm volatile("LOCK0\n\t"					\
		     "MOV	TXMASKI,%0"				\
		     :							\
		     : "r" (__trval)					\
		     : "memory");					\
} while (0)
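
/*
 * Usage sketch (illustrative, not part of the original header):
 *
 *	unsigned int flags;
 *
 *	__global_lock1(flags);
 *	// ... voluntary critical section: other hardware threads keep
 *	// running, but none can take LOCK1 or LOCK2 until we unlock ...
 *	__global_unlock1(flags);
 *
 * @flags must reach __global_unlock1() unmodified, as it holds the
 * trigger mask (TXMASKI) saved at lock time.
 */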

/**
 * __global_lock2() - Acquire global exclusive lock (LOCK2).
 * @flags:	Variable to store flags into.
 *
 * Acquires the Meta global voluntary lock and global exclusive lock (LOCK2),
 * also taking care to disable all triggers so we cannot be interrupted, to take
 * the atomic lock (system event) and to enforce a compiler barrier so that the
 * compiler cannot reorder memory accesses across the lock.
 *
 * No other hardware thread will be able to execute code until the locks are
 * released with @__global_unlock2.
 */
#define __global_lock2(flags) do {					\
	unsigned int __trval;						\
	unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000; \
	asm volatile("MOV	%0,#0\n\t"				\
		     "SWAP	%0,TXMASKI\n\t"				\
		     "LOCK2\n\t"					\
		     "SETD	[%1+#0x40],D1RtP"			\
		     : "=r&" (__trval)					\
		     : "u" (__alock_hi)					\
		     : "memory");					\
	(flags) = __trval;						\
} while (0)
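
/*
 * Implementation note (editorial, partly inferred): the SETD takes the
 * atomic lock by writing D1RtP to a system event address; the base
 * register holds the top 16 bits of LINSYSEVENT_WR_ATOMIC_LOCK and the
 * #0x40 immediate apparently supplies the rest of the write address.
 * The matching release in __global_unlock2() writes to offset #0x00.
 */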

/**
 * __global_unlock2() - Release global exclusive lock (LOCK2).
 * @flags:	Variable to restore flags from.
 *
 * Releases the Meta global exclusive lock (LOCK2) and global voluntary lock
 * acquired with @__global_lock2, also taking care to release the atomic lock
 * (system event), re-enable triggers, and to enforce a compiler barrier so that
 * the compiler cannot reorder memory accesses across the unlock.
 *
 * This immediately allows other hardware threads to continue executing and one
 * of them to acquire locks.
 */
#define __global_unlock2(flags) do {					\
	unsigned int __trval = (flags);					\
	unsigned int __alock_hi = LINSYSEVENT_WR_ATOMIC_LOCK & 0xFFFF0000; \
	asm volatile("SETD	[%1+#0x00],D1RtP\n\t"			\
		     "LOCK0\n\t"					\
		     "MOV	TXMASKI,%0"				\
		     :							\
		     : "r" (__trval),					\
		       "u" (__alock_hi)					\
		     : "memory");					\
} while (0)
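
/*
 * Usage sketch (illustrative, not part of the original header):
 *
 *	unsigned int flags;
 *
 *	__global_lock2(flags);
 *	// ... exclusive critical section: every other hardware thread is
 *	// stalled until the unlock, so keep this region short ...
 *	__global_unlock2(flags);
 */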

#endif /* __ASM_METAG_GLOBAL_LOCK_H */