
Searched defs:lock (Results 1 – 25 of 153) sorted by relevance


/arch/hexagon/include/asm/
spinlock.h
    28: static inline void arch_read_lock(arch_rwlock_t *lock)
    43: static inline void arch_read_unlock(arch_rwlock_t *lock)
    58: static inline int arch_read_trylock(arch_rwlock_t *lock)
    76: static inline void arch_write_lock(arch_rwlock_t *lock)
    91: static inline int arch_write_trylock(arch_rwlock_t *lock)
   109: static inline void arch_write_unlock(arch_rwlock_t *lock)
   115: static inline void arch_spin_lock(arch_spinlock_t *lock)
   130: static inline void arch_spin_unlock(arch_spinlock_t *lock)
   136: static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
spinlock_types.h
    16: volatile unsigned int lock; (member)
    22: volatile unsigned int lock; (member)
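
The Hexagon primitives above spin on a single volatile lock word using LL/SC assembly. As a rough illustration of what that API shape looks like, here is a minimal test-and-set sketch built on GCC atomic builtins; the sketch_* names are hypothetical and the real per-architecture code differs.

    /* Minimal test-and-set spinlock sketch over one lock word, using
     * GCC __atomic builtins in place of Hexagon's LL/SC assembly.
     * Illustrative only; names mirror, but are not, the kernel API. */
    typedef struct {
        volatile unsigned int lock;     /* 0 = free, 1 = held */
    } sketch_spinlock_t;

    static inline void sketch_spin_lock(sketch_spinlock_t *lock)
    {
        /* Spin until the exchange observes a previous value of 0. */
        while (__atomic_exchange_n(&lock->lock, 1, __ATOMIC_ACQUIRE))
            ;
    }

    static inline int sketch_spin_trylock(sketch_spinlock_t *lock)
    {
        /* One attempt: success means we flipped 0 -> 1 ourselves. */
        return !__atomic_exchange_n(&lock->lock, 1, __ATOMIC_ACQUIRE);
    }

    static inline void sketch_spin_unlock(sketch_spinlock_t *lock)
    {
        __atomic_store_n(&lock->lock, 0, __ATOMIC_RELEASE);
    }
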
/arch/alpha/include/asm/
spinlock.h
    19: static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
    24: static inline void arch_spin_unlock(arch_spinlock_t * lock)
    30: static inline void arch_spin_lock(arch_spinlock_t * lock)
    50: static inline int arch_spin_trylock(arch_spinlock_t *lock)
    57: static inline void arch_read_lock(arch_rwlock_t *lock)
    77: static inline void arch_write_lock(arch_rwlock_t *lock)
    97: static inline int arch_read_trylock(arch_rwlock_t * lock)
   119: static inline int arch_write_trylock(arch_rwlock_t * lock)
   141: static inline void arch_read_unlock(arch_rwlock_t * lock)
   157: static inline void arch_write_unlock(arch_rwlock_t * lock)
spinlock_types.h
    10: volatile unsigned int lock; (member)
    16: volatile unsigned int lock; (member)
/arch/powerpc/include/asm/
simple_spinlock.h
    35: static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
    40: static inline int arch_spin_is_locked(arch_spinlock_t *lock)
    49: static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
    70: static inline int arch_spin_trylock(arch_spinlock_t *lock)
    94: static inline void splpar_spin_yield(arch_spinlock_t *lock) {}
    95: static inline void splpar_rw_yield(arch_rwlock_t *lock) {}
    98: static inline void spin_yield(arch_spinlock_t *lock)
   106: static inline void rw_yield(arch_rwlock_t *lock)
   114: static inline void arch_spin_lock(arch_spinlock_t *lock)
   128: static inline void arch_spin_unlock(arch_spinlock_t *lock)
   [all …]
qspinlock.h
    71: static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
    76: static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
    81: static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
    92: static __always_inline int __queued_spin_trylock_nosteal(struct qspinlock *lock)
   114: static __always_inline int __queued_spin_trylock_steal(struct qspinlock *lock)
   138: static __always_inline int queued_spin_trylock(struct qspinlock *lock)
   148: static __always_inline void queued_spin_lock(struct qspinlock *lock)
   154: static inline void queued_spin_unlock(struct qspinlock *lock)
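
The queued_spin_lock/queued_spin_trylock entries above share one fast path: a single compare-and-swap of the whole lock word from 0 (free) to a locked value, falling back to a queueing slow path when the word is contended. A hedged C11 sketch of just that fast path follows; the sketch_* names are made up and the slow path is elided.

    #include <stdatomic.h>

    #define SKETCH_LOCKED 1U

    struct sketch_qspinlock {
        _Atomic unsigned int val;   /* 0 = free; nonzero encodes state */
    };

    static inline int sketch_queued_spin_trylock(struct sketch_qspinlock *lock)
    {
        unsigned int expected = 0;

        /* Succeeds only when the entire word is 0, i.e. no holder,
         * no pending bit, and no queued waiters. */
        return atomic_compare_exchange_strong_explicit(&lock->val,
                        &expected, SKETCH_LOCKED,
                        memory_order_acquire, memory_order_relaxed);
    }
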
/arch/sparc/include/asm/
spinlock_32.h
    16: #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) (macro argument)
    18: static inline void arch_spin_lock(arch_spinlock_t *lock)
    38: static inline int arch_spin_trylock(arch_spinlock_t *lock)
    48: static inline void arch_spin_unlock(arch_spinlock_t *lock)
    92: #define arch_read_lock(lock) \ (macro argument)
   112: #define arch_read_unlock(lock) \ (macro argument)
   133: static inline void arch_write_unlock(arch_rwlock_t *lock)
   177: #define arch_read_trylock(lock) \ (macro argument)
spinlock_types.h
    10: volatile unsigned char lock; (member)
    20: volatile unsigned int lock; (member)
/arch/ia64/include/asm/
spinlock.h
    43: static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock)
    63: static __always_inline int __ticket_spin_trylock(arch_spinlock_t *lock)
    72: static __always_inline void __ticket_spin_unlock(arch_spinlock_t *lock)
    82: static inline int __ticket_spin_is_locked(arch_spinlock_t *lock)
    89: static inline int __ticket_spin_is_contended(arch_spinlock_t *lock)
    96: static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
   101: static inline int arch_spin_is_locked(arch_spinlock_t *lock)
   106: static inline int arch_spin_is_contended(arch_spinlock_t *lock)
   112: static __always_inline void arch_spin_lock(arch_spinlock_t *lock)
   117: static __always_inline int arch_spin_trylock(arch_spinlock_t *lock)
   [all …]
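
The __ticket_spin_* entries implement a ticket lock: each acquirer atomically takes the next ticket, then spins until the "now serving" counter reaches it, so waiters are served in FIFO order. A minimal C11 sketch of the idea (the sketch_* names are hypothetical, and the ia64 code packs both counters into a single word rather than using two fields):

    #include <stdatomic.h>

    typedef struct {
        _Atomic unsigned int next;    /* next ticket to hand out */
        _Atomic unsigned int owner;   /* ticket currently served */
    } sketch_ticket_lock_t;

    static inline void sketch_ticket_lock(sketch_ticket_lock_t *lock)
    {
        unsigned int me = atomic_fetch_add_explicit(&lock->next, 1,
                                                    memory_order_relaxed);
        /* Spin until our ticket comes up. */
        while (atomic_load_explicit(&lock->owner, memory_order_acquire) != me)
            ;
    }

    static inline void sketch_ticket_unlock(sketch_ticket_lock_t *lock)
    {
        /* Hand the lock to the next ticket holder, if any. */
        atomic_fetch_add_explicit(&lock->owner, 1, memory_order_release);
    }
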
acenv.h
    20: ia64_acpi_acquire_global_lock(unsigned int *lock)
    32: ia64_acpi_release_global_lock(unsigned int *lock)
/arch/x86/include/asm/
qspinlock.h
    14: static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
    44: static inline void native_queued_spin_unlock(struct qspinlock *lock)
    49: static inline void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
    54: static inline void queued_spin_unlock(struct qspinlock *lock)
    88: static inline bool virt_spin_lock(struct qspinlock *lock)
cmpxchg.h
    42: #define __xchg_op(ptr, arg, op, lock) \ (macro argument)
    85: #define __raw_cmpxchg(ptr, old, new, size, lock) \ (macro argument)
   158: #define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock) \ (macro argument)
   239: #define __xadd(ptr, inc, lock) __xchg_op((ptr), (inc), xadd, lock) (macro argument)
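
__raw_try_cmpxchg above expands to the try_cmpxchg pattern: a boolean compare-and-exchange that, on failure, writes the value it observed back through the old-value pointer, so a retry loop never has to reload the word explicitly. C11's compare_exchange has the same contract; a small sketch with hypothetical names:

    #include <stdatomic.h>
    #include <stdbool.h>

    static inline bool sketch_try_cmpxchg(_Atomic unsigned int *ptr,
                                          unsigned int *old, unsigned int new)
    {
        /* On failure, the value actually seen is stored into *old,
         * matching the kernel's try_cmpxchg contract. */
        return atomic_compare_exchange_strong(ptr, old, new);
    }

    /* Typical retry loop: *old is refreshed automatically on failure. */
    static inline void sketch_atomic_or(_Atomic unsigned int *ptr,
                                        unsigned int bits)
    {
        unsigned int old = atomic_load(ptr);

        while (!sketch_try_cmpxchg(ptr, &old, old | bits))
            ;   /* old now holds the freshly observed value */
    }
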
/arch/arm64/kvm/hyp/include/nvhe/
rwlock.h
    38: static inline void hyp_write_lock(hyp_rwlock_t *lock)
    65: static inline void hyp_write_unlock(hyp_rwlock_t *lock)
    73: static inline void hyp_read_lock(hyp_rwlock_t *lock)
   101: static inline void hyp_read_unlock(hyp_rwlock_t *lock)
   121: static inline void hyp_assert_write_lock_held(hyp_rwlock_t *lock)
   126: static inline void hyp_assert_write_lock_held(hyp_rwlock_t *lock) { }
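
The hyp_read_/hyp_write_ entries above are a readers-count rwlock for EL2 code: writers own the whole word, concurrent readers share it via a counter. Purely to illustrate that scheme, here is a hedged C11 sketch in which the top bit marks a writer and the low bits count readers; the sketch_* names are invented, writers can starve, and the actual pKVM code (hand-written atomics plus WFE waiting) differs.

    #include <stdatomic.h>

    #define SKETCH_WRITER (1U << 31)   /* top bit: writer holds the lock */

    typedef struct {
        _Atomic unsigned int cnts;     /* writer bit | reader count */
    } sketch_rwlock_t;

    static inline void sketch_read_lock(sketch_rwlock_t *lock)
    {
        for (;;) {
            unsigned int c = atomic_fetch_add_explicit(&lock->cnts, 1,
                                                       memory_order_acquire);
            if (!(c & SKETCH_WRITER))
                return;                /* no writer: read side acquired */
            /* Undo our increment and wait for the writer to leave. */
            atomic_fetch_sub_explicit(&lock->cnts, 1, memory_order_relaxed);
            while (atomic_load_explicit(&lock->cnts, memory_order_relaxed)
                   & SKETCH_WRITER)
                ;
        }
    }

    static inline void sketch_read_unlock(sketch_rwlock_t *lock)
    {
        atomic_fetch_sub_explicit(&lock->cnts, 1, memory_order_release);
    }

    static inline void sketch_write_lock(sketch_rwlock_t *lock)
    {
        unsigned int expected = 0;

        /* Writers need the word fully free: no readers, no writer. */
        while (!atomic_compare_exchange_weak_explicit(&lock->cnts,
                        &expected, SKETCH_WRITER,
                        memory_order_acquire, memory_order_relaxed))
            expected = 0;
    }

    static inline void sketch_write_unlock(sketch_rwlock_t *lock)
    {
        atomic_store_explicit(&lock->cnts, 0, memory_order_release);
    }
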
spinlock.h
    44: static inline void hyp_spin_lock(hyp_spinlock_t *lock)
    82: static inline void hyp_spin_unlock(hyp_spinlock_t *lock)
   101: static inline bool hyp_spin_is_locked(hyp_spinlock_t *lock)
   109: static inline void hyp_assert_lock_held(hyp_spinlock_t *lock)
   122: static inline void hyp_assert_lock_held(hyp_spinlock_t *lock) { }
/arch/mips/include/asm/octeon/
cvmx-spinlock.h
    63: static inline void cvmx_spinlock_init(cvmx_spinlock_t *lock)
    74: static inline int cvmx_spinlock_locked(cvmx_spinlock_t *lock)
    84: static inline void cvmx_spinlock_unlock(cvmx_spinlock_t *lock)
   103: static inline unsigned int cvmx_spinlock_trylock(cvmx_spinlock_t *lock)
   128: static inline void cvmx_spinlock_lock(cvmx_spinlock_t *lock)
/arch/arm/include/asm/
spinlock.h
    56: static inline void arch_spin_lock(arch_spinlock_t *lock)
    81: static inline int arch_spin_trylock(arch_spinlock_t *lock)
   107: static inline void arch_spin_unlock(arch_spinlock_t *lock)
   114: static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
   119: static inline int arch_spin_is_locked(arch_spinlock_t *lock)
   124: static inline int arch_spin_is_contended(arch_spinlock_t *lock)
mcs_spinlock.h
     9: #define arch_mcs_spin_lock_contended(lock) \ (macro argument)
    17: #define arch_mcs_spin_unlock_contended(lock) \ (macro argument)
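
arch_mcs_spin_lock_contended/arch_mcs_spin_unlock_contended above are the arm hooks into MCS queued locking: each waiter spins on a flag in its own queue node instead of the shared lock word, and the holder passes ownership with a single store to its successor. A self-contained C11 sketch of the classic MCS algorithm, with hypothetical sketch_* names rather than the kernel's macro-based implementation:

    #include <stdatomic.h>
    #include <stddef.h>

    struct sketch_mcs_node {
        struct sketch_mcs_node *_Atomic next;
        _Atomic int locked;            /* set by predecessor at handoff */
    };

    static inline void sketch_mcs_lock(struct sketch_mcs_node *_Atomic *tail,
                                       struct sketch_mcs_node *node)
    {
        struct sketch_mcs_node *prev;

        atomic_store_explicit(&node->next, NULL, memory_order_relaxed);
        atomic_store_explicit(&node->locked, 0, memory_order_relaxed);

        /* Join the queue; the previous tail is our predecessor. */
        prev = atomic_exchange_explicit(tail, node, memory_order_acq_rel);
        if (!prev)
            return;                    /* queue was empty: lock is ours */

        atomic_store_explicit(&prev->next, node, memory_order_release);
        while (!atomic_load_explicit(&node->locked, memory_order_acquire))
            ;                          /* spin on our own node only */
    }

    static inline void sketch_mcs_unlock(struct sketch_mcs_node *_Atomic *tail,
                                         struct sketch_mcs_node *node)
    {
        struct sketch_mcs_node *next =
            atomic_load_explicit(&node->next, memory_order_acquire);

        if (!next) {
            struct sketch_mcs_node *expected = node;

            /* No visible successor: try to swing the tail back to empty. */
            if (atomic_compare_exchange_strong_explicit(tail, &expected,
                            NULL, memory_order_release, memory_order_relaxed))
                return;
            /* A successor is mid-enqueue; wait for its next-pointer store. */
            while (!(next = atomic_load_explicit(&node->next,
                                                 memory_order_acquire)))
                ;
        }
        atomic_store_explicit(&next->locked, 1, memory_order_release);
    }
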
/arch/arc/include/asm/
spinlock.h
    17: static inline void arch_spin_lock(arch_spinlock_t *lock)
    44: static inline int arch_spin_trylock(arch_spinlock_t *lock)
    67: static inline void arch_spin_unlock(arch_spinlock_t *lock)
   222: static inline void arch_spin_lock(arch_spinlock_t *lock)
   244: static inline int arch_spin_trylock(arch_spinlock_t *lock)
   261: static inline void arch_spin_unlock(arch_spinlock_t *lock)
/arch/sh/include/asm/
spinlock_types.h
    10: volatile unsigned int lock; (member)
    16: volatile unsigned int lock; (member)
spinlock-cas.h
    28: static inline void arch_spin_lock(arch_spinlock_t *lock)
    33: static inline void arch_spin_unlock(arch_spinlock_t *lock)
    38: static inline int arch_spin_trylock(arch_spinlock_t *lock)
spinlock-llsc.h
    26: static inline void arch_spin_lock(arch_spinlock_t *lock)
    46: static inline void arch_spin_unlock(arch_spinlock_t *lock)
    61: static inline int arch_spin_trylock(arch_spinlock_t *lock)
/arch/powerpc/lib/
qspinlock.c
    17: struct qspinlock *lock; (member)
   121: static __always_inline u32 trylock_clean_tail(struct qspinlock *lock, u32 tail)
   160: static __always_inline u32 publish_tail_cpu(struct qspinlock *lock, u32 tail)
   180: static __always_inline u32 set_mustq(struct qspinlock *lock)
   196: static __always_inline u32 clear_mustq(struct qspinlock *lock)
   212: static __always_inline bool try_set_sleepy(struct qspinlock *lock, u32 old)
   234: static __always_inline void seen_sleepy_owner(struct qspinlock *lock, u32 val)
   250: static __always_inline void seen_sleepy_node(struct qspinlock *lock, u32 val)
   262: static struct qnode *get_tail_qnode(struct qspinlock *lock, u32 val)
   287: static __always_inline bool __yield_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt, …
   [all …]
/arch/arm/mach-omap1/
sram.S
    49: lock: ldrh r4, [r2], #0 @ read back dpll value (label)
/arch/mips/include/asm/
spinlock.h
    21: static inline void queued_spin_unlock(struct qspinlock *lock)
/arch/s390/lib/
spinlock.c
    73: static inline int arch_load_niai4(int *lock)
    84: static inline int arch_cmpxchg_niai8(int *lock, int old, int new)
    97: static inline struct spin_wait *arch_spin_decode_tail(int lock)
   106: static inline int arch_spin_yield_target(int lock, struct spin_wait *node)
