/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc.  It contains only constants,
 ***   structures, and macros generated from the original header, and thus,
 ***   contains no copyrightable information.
 ***
 ****************************************************************************
 ****************************************************************************/
#ifndef __ASM_PROC_LOCKS_H
#define __ASM_PROC_LOCKS_H

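/*
 * Semaphore and read/write lock helper macros.  On ARMv6 and later the
 * helpers use ldrex/strex retry loops with explicit smp_mb() barriers;
 * the #else branch below covers earlier cores, which lack exclusive
 * load/store and instead mask interrupts around a plain load/modify/store.
 */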
#if __LINUX_ARM_ARCH__ >= 6

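/* Atomically decrement the count at ptr; if the result is negative,
 * branch-and-link to 'fail' with ptr in ip. */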
#define __down_op(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op\n"  "1:	ldrex	lr, [%0]\n"  "	sub	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movmi	ip, %0\n"  "	blmi	" #fail   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   })

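/* Like __down_op, but evaluates to 0 when the decremented count stays
 * non-negative, otherwise to whatever 'fail' leaves in ip. */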
#define __down_op_ret(ptr,fail)   ({   unsigned int ret;   __asm__ __volatile__(   "@ down_op_ret\n"  "1:	ldrex	lr, [%1]\n"  "	sub	lr, lr, %2\n"  "	strex	ip, lr, [%1]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movmi	ip, %1\n"  "	movpl	ip, #0\n"  "	blmi	" #fail "\n"  "	mov	%0, ip"   : "=&r" (ret)   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   ret;   })

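/* Atomically increment the count at ptr; call 'wake' if the result is
 * still <= 0, i.e. somebody is queued on the count. */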
#define __up_op(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op\n"  "1:	ldrex	lr, [%0]\n"  "	add	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	cmp	lr, #0\n"  "	movle	ip, %0\n"  "	blle	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })

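/* Bias used by the read/write lock helpers below: a writer claims the
 * whole bias, each reader takes 1. */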
#define RW_LOCK_BIAS 0x01000000
#define RW_LOCK_BIAS_STR "0x01000000"

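/* Subtract the full write bias; call 'fail' unless the count drops to
 * exactly zero (the lock was completely free). */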
#define __down_op_write(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op_write\n"  "1:	ldrex	lr, [%0]\n"  "	sub	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	movne	ip, %0\n"  "	blne	" #fail   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })

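/* Add the write bias back; call 'wake' if the addition sets the carry
 * flag, i.e. the count went negative while the write lock was held. */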
#define __up_op_write(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_write\n"  "1:	ldrex	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	movcs	ip, %0\n"  "	blcs	" #wake   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   })

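/* A read-lock acquire is the same decrement-by-one as a plain down. */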
#define __down_op_read(ptr,fail)   __down_op(ptr, fail)

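/* Drop a read lock: increment the count and call 'wake' when the result
 * is exactly zero. */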
#define __up_op_read(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_read\n"  "1:	ldrex	lr, [%0]\n"  "	add	lr, lr, %1\n"  "	strex	ip, lr, [%0]\n"  "	teq	ip, #0\n"  "	bne	1b\n"  "	teq	lr, #0\n"  "	moveq	ip, %0\n"  "	bleq	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })

#else
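/*
 * Pre-ARMv6 fallback: each helper disables IRQs (sets the CPSR I bit,
 * #128) around a non-atomic load/modify/store.  The macros otherwise
 * mirror the ARMv6 versions above.
 */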

#define __down_op(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	subs	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movmi	ip, %0\n"  "	blmi	" #fail   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   })

#define __down_op_ret(ptr,fail)   ({   unsigned int ret;   __asm__ __volatile__(   "@ down_op_ret\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%1]\n"  "	subs	lr, lr, %2\n"  "	str	lr, [%1]\n"  "	msr	cpsr_c, ip\n"  "	movmi	ip, %1\n"  "	movpl	ip, #0\n"  "	blmi	" #fail "\n"  "	mov	%0, ip"   : "=&r" (ret)   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   smp_mb();   ret;   })

#define __up_op(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movle	ip, %0\n"  "	blle	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })

#define RW_LOCK_BIAS 0x01000000
#define RW_LOCK_BIAS_STR "0x01000000"

#define __down_op_write(ptr,fail)   ({   __asm__ __volatile__(   "@ down_op_write\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	subs	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movne	ip, %0\n"  "	blne	" #fail   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })

#define __up_op_write(ptr,wake)   ({   __asm__ __volatile__(   "@ up_op_write\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	movcs	ip, %0\n"  "	blcs	" #wake   :   : "r" (ptr), "I" (RW_LOCK_BIAS)   : "ip", "lr", "cc");   smp_mb();   })

#define __down_op_read(ptr,fail)   __down_op(ptr, fail)

#define __up_op_read(ptr,wake)   ({   smp_mb();   __asm__ __volatile__(   "@ up_op_read\n"  "	mrs	ip, cpsr\n"  "	orr	lr, ip, #128\n"  "	msr	cpsr_c, lr\n"  "	ldr	lr, [%0]\n"  "	adds	lr, lr, %1\n"  "	str	lr, [%0]\n"  "	msr	cpsr_c, ip\n"  "	moveq	ip, %0\n"  "	bleq	" #wake   :   : "r" (ptr), "I" (1)   : "ip", "lr", "cc");   })

#endif

#endif