/arch/xtensa/include/asm/ |
D | coprocessor.h |
      132  #define __REG2_1(n,s,a) unsigned char n[s] __attribute__ ((aligned(a)));
      133  #define __REG2_2(n,s,a) unsigned char n[s] __attribute__ ((aligned(a)));
      136  __attribute__ ((aligned (XCHAL_NCP_SA_ALIGN)));
      138  __attribute__ ((aligned (XCHAL_NCP_SA_ALIGN)));
      143  __attribute__ ((aligned (XCHAL_CP0_SA_ALIGN)));
      145  __attribute__ ((aligned (XCHAL_CP1_SA_ALIGN)));
      147  __attribute__ ((aligned (XCHAL_CP2_SA_ALIGN)));
      149  __attribute__ ((aligned (XCHAL_CP3_SA_ALIGN)));
      151  __attribute__ ((aligned (XCHAL_CP4_SA_ALIGN)));
      153  __attribute__ ((aligned (XCHAL_CP5_SA_ALIGN)));
      [all …]
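
The __REG2_* macros and the XCHAL_*_SA_ALIGN save areas above boil down to one idiom: an opaque byte array whose alignment is forced with the aligned attribute, so that coprocessor state can be spilled to it directly. A minimal stand-alone sketch of that idiom; the names cp_save_area, CP_SA_ALIGN and the values 16/64 are illustrative, not taken from the Xtensa headers:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical alignment and size, standing in for XCHAL_CP0_SA_ALIGN etc. */
#define CP_SA_ALIGN 16
#define CP_SA_SIZE  64

/* A save area is opaque storage; only its size and alignment matter. */
struct cp_save_area {
	unsigned char regs[CP_SA_SIZE] __attribute__((aligned(CP_SA_ALIGN)));
};

int main(void)
{
	struct cp_save_area sa;

	/* The byte array's address honours the requested alignment. */
	printf("save area at %p, %d-byte aligned: %s\n",
	       (void *)sa.regs, CP_SA_ALIGN,
	       ((uintptr_t)sa.regs % CP_SA_ALIGN) == 0 ? "yes" : "no");
	return 0;
}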
|
/arch/sparc/kernel/ |
D | sstate.c |
      37  static const char booting_msg[32] __attribute__((aligned(32))) =
      39  static const char running_msg[32] __attribute__((aligned(32))) =
      41  static const char halting_msg[32] __attribute__((aligned(32))) =
      43  static const char poweroff_msg[32] __attribute__((aligned(32))) =
      45  static const char rebooting_msg[32] __attribute__((aligned(32))) =
      47  static const char panicking_msg[32] __attribute__((aligned(32))) =
|
/arch/frv/include/uapi/asm/ |
D | registers.h |
      123  } __attribute__((aligned(8)));
      149  unsigned long ibar[4] __attribute__((aligned(8)));
      150  unsigned long dbar[4] __attribute__((aligned(8)));
      151  unsigned long dbdr[4][4] __attribute__((aligned(8)));
      152  unsigned long dbmr[4][4] __attribute__((aligned(8)));
      153  } __attribute__((aligned(8)));
      208  } __attribute__((aligned(8)));
      218  } __attribute__((aligned(32)));
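
The FRV debug-register layout above applies aligned(8) both to individual array members and to the enclosing struct; either placement raises the structure's overall alignment. A small sketch of that effect (the field names are illustrative, not the real uapi layout):

#include <stdio.h>

/* Illustrative layout only; not copied from the FRV headers. */
struct debug_regs {
	unsigned long ibar[4] __attribute__((aligned(8)));
	unsigned long dbar[4] __attribute__((aligned(8)));
} __attribute__((aligned(8)));

int main(void)
{
	/* _Alignof reports the combined effect of member and struct attributes. */
	printf("alignof(struct debug_regs) = %zu\n", _Alignof(struct debug_regs));
	printf("sizeof(struct debug_regs)  = %zu\n", sizeof(struct debug_regs));
	return 0;
}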
|
/arch/xtensa/lib/ |
D | memset.S |
      55   .L0: # return here from .Ldstunaligned when dst is aligned
      64   # set 16 bytes per iteration for word-aligned dst
      114  bbci.l a5, 0, .L20 # branch if dst alignment half-aligned
      115  # dst is only byte aligned
      120  # now retest if dst aligned
      121  bbci.l a5, 1, .L0 # if now aligned, return to main algorithm
      123  # dst half-aligned
      128  j .L0 # dst is now aligned, return to main algorithm
|
D | memcopy.S |
      103  .Ldst1mod2: # dst is only byte aligned
      112  _bbci.l a5, 1, .Ldstaligned # if dst is now aligned, then
      114  .Ldst2mod4: # dst 16-bit aligned
      124  j .Ldstaligned # dst is now aligned, return to main algorithm
      137  .Ldstaligned: # return here from .Ldst?mod? once dst is aligned
      140  movi a8, 3 # if source is not aligned,
      145  # copy 16 bytes per iteration for word-aligned dst and word-aligned src
      211  # copy 16 bytes per iteration for word-aligned dst and unaligned src
      370  .Lbackdst1mod2: # dst is only byte aligned
      379  _bbci.l a5, 1, .Lbackdstaligned # if dst is now aligned, then
      [all …]
|
D | usercopy.S |
      87   .Ldstaligned: # return here from .Ldstunaligned when dst is aligned
      90   movi a8, 3 # if source is also aligned,
      101  .Ldst1mod2: # dst is only byte aligned
      110  bbci.l a5, 1, .Ldstaligned # if dst is now aligned, then
      112  .Ldst2mod4: # dst 16-bit aligned
      122  j .Ldstaligned # dst is now aligned, return to main algorithm
      152  # copy 16 bytes per iteration for word-aligned dst and word-aligned src
      217  # copy 16 bytes per iteration for word-aligned dst and unaligned src
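
The Xtensa memset/memcopy/usercopy routines above share one structure: fix up the destination a byte (and halfword) at a time until it is word aligned, run a fast word-sized inner loop, then mop up the tail. A C sketch of that general strategy, assuming a 4-byte word; memset_aligned is a made-up name, not the kernel routine:

#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void *memset_aligned(void *dst, int c, size_t n)
{
	unsigned char *d = dst;
	uint32_t word = (unsigned char)c * 0x01010101u;	/* byte replicated into a word */

	/* Head: advance byte by byte until d sits on a 4-byte boundary. */
	while (n && ((uintptr_t)d & 3)) {
		*d++ = (unsigned char)c;
		n--;
	}
	/* Body: word-sized stores while at least 4 bytes remain. */
	while (n >= 4) {
		memcpy(d, &word, 4);	/* alignment-safe word store */
		d += 4;
		n -= 4;
	}
	/* Tail: remaining bytes. */
	while (n--)
		*d++ = (unsigned char)c;
	return dst;
}

int main(void)
{
	char buf[17];

	memset_aligned(buf, 'x', sizeof(buf) - 1);
	buf[16] = '\0';
	printf("%s\n", buf);
	return 0;
}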
|
/arch/mips/include/uapi/asm/ |
D | ptrace.h |
      48  } __attribute__ ((aligned (8)));
      88  } __attribute__((aligned(8)));
      95  } __attribute__((aligned(8)));
|
/arch/x86/include/uapi/asm/ |
D | siginfo.h |
      7  typedef long long __kernel_si_clock_t __attribute__((aligned(4)));
      9  # define __ARCH_SI_ATTRIBUTES __attribute__((aligned(8)))
|
D | statfs.h | 10 #define ARCH_PACK_COMPAT_STATFS64 __attribute__((packed,aligned(4)))
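
ARCH_PACK_COMPAT_STATFS64 (and its ia64/arm64/arm counterparts further down) exists because a 64-bit field would otherwise pull in 8-byte alignment and padding that 32-bit userspace does not expect. A sketch of the effect, using an invented two-field struct rather than the real compat_statfs64; the printed numbers assume a typical LP64 ABI:

#include <stdio.h>

struct with_default_abi {
	unsigned int f_type;
	unsigned long long f_blocks;	/* naturally 8-byte aligned: padding inserted before it */
};

struct with_compat_abi {
	unsigned int f_type;
	unsigned long long f_blocks;	/* packed right after f_type; struct aligned to 4 */
} __attribute__((packed, aligned(4)));

int main(void)
{
	printf("default: size %zu, align %zu\n",
	       sizeof(struct with_default_abi), _Alignof(struct with_default_abi));
	printf("compat : size %zu, align %zu\n",
	       sizeof(struct with_compat_abi), _Alignof(struct with_compat_abi));
	return 0;
}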
|
/arch/frv/include/asm/ |
D | cache.h |
      20  #define __cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))
      21  #define ____cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))
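
__cacheline_aligned simply expands to aligned(L1_CACHE_BYTES); its usual payoff is keeping independently written fields on separate cache lines. A stand-alone sketch, assuming a 64-byte line size (the FRV value may differ):

#include <stddef.h>
#include <stdio.h>

/* Stand-in for the kernel's L1_CACHE_BYTES; 64 bytes is an assumption here. */
#define L1_CACHE_BYTES 64
#define __cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))

/* Two frequently written counters kept on separate cache lines so that
 * writers on different CPUs do not false-share a line. */
struct stats {
	unsigned long rx __cacheline_aligned;
	unsigned long tx __cacheline_aligned;
};

int main(void)
{
	printf("rx offset %zu, tx offset %zu, sizeof %zu\n",
	       offsetof(struct stats, rx),
	       offsetof(struct stats, tx),
	       sizeof(struct stats));
	return 0;
}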
|
/arch/s390/include/asm/ |
D | qdio.h |
      116  } __attribute__ ((packed, aligned(256)));
      158  } __attribute__ ((packed, aligned(256)));
      175  } __attribute__ ((packed, aligned(2048)));
      216  } __attribute__ ((packed, aligned(16)));
      224  } __attribute__ ((packed, aligned(256)));
      240  } __attribute__ ((packed, aligned(1024)));
      248  } __attribute__ ((packed, aligned(256)));
|
D | fcx.h |
      56   } __attribute__ ((packed, aligned(64)));
      77   } __attribute__ ((packed, aligned(16)));
      172  } __attribute__ ((packed, aligned(8)));
      290  } __attribute__ ((packed, aligned(8)));
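
The s390 qdio and fcx blocks combine packed (exact byte layout) with aligned (placement the hardware can rely on). A hypothetical control block in the same style, with compile-time checks of both properties; the field names and the 256-byte figure are invented for illustration:

#include <stdio.h>

struct ctrl_block {
	unsigned char      version;
	unsigned char      flags;
	unsigned short     count;
	unsigned long long addr;
	unsigned char      reserved[244];
} __attribute__((packed, aligned(256)));

_Static_assert(sizeof(struct ctrl_block) == 256, "layout must match the hardware");
_Static_assert(_Alignof(struct ctrl_block) == 256, "block must sit on a 256-byte boundary");

int main(void)
{
	static struct ctrl_block cb;	/* static storage honours the 256-byte alignment */

	printf("ctrl block at %p\n", (void *)&cb);
	return 0;
}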
|
/arch/sparc/lib/ |
D | M7memset.S |
      144  andcc %o5, 7, %o3 ! is sp1 aligned on a 8 byte bound?
      145  bz,pt %xcc, .blkalign ! already long word aligned
      146  sub %o3, 8, %o3 ! -(bytes till long word aligned)
      149  ! Set -(%o3) bytes till sp1 long word aligned
      155  ! Now sp1 is long word aligned (sp1 is found in %o5)
      161  andcc %o5, 63, %o3 ! is sp1 block aligned?
      162  bz,pt %xcc, .blkwr ! now block aligned
      163  sub %o3, 64, %o3 ! o3 is -(bytes till block aligned)
      166  ! Store -(%o3) bytes till dst is block (64 byte) aligned.
      168  ! Recall that dst is already long word aligned
      [all …]
|
/arch/arm/lib/ |
D | bitops.h |
      10  strneb r1, [ip] @ assert word-aligned
      35  strneb r1, [ip] @ assert word-aligned
      65  strneb r1, [ip] @ assert word-aligned
      92  strneb r1, [ip] @ assert word-aligned
|
/arch/arm/nwfpe/ |
D | fpa11.h |
      64  } __attribute__ ((packed,aligned(4))) FPREG;
      91  } __attribute__ ((packed,aligned(4))) FPA11;
|
/arch/sparc/include/asm/ |
D | processor_32.h |
      51  unsigned long fork_kpsr __attribute__ ((aligned (8)));
      55  unsigned long float_regs[32] __attribute__ ((aligned (8)));
|
/arch/s390/mm/ |
D | maccess.c |
      22  unsigned long aligned, offset, count;           in s390_kernel_write_odd() local
      25  aligned = (unsigned long) dst & ~7UL;           in s390_kernel_write_odd()
      37  : "+&a" (aligned), "+&a" (count), "=m" (tmp)    in s390_kernel_write_odd()
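
The maccess.c lines above carve a destination address into an aligned base (dst & ~7UL), an offset within that 8-byte doubleword, and a byte count clamped to the doubleword. A sketch of just that arithmetic; split_write is a made-up helper, and the real s390_kernel_write_odd() goes on to rewrite the doubleword via inline assembly:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

static void split_write(uintptr_t dst, size_t len)
{
	uintptr_t aligned = dst & ~(uintptr_t)7;	/* start of the containing doubleword */
	size_t offset = dst & 7;			/* where the write starts inside it */
	size_t count = len;

	if (count > 8 - offset)				/* clamp to this doubleword */
		count = 8 - offset;

	printf("dst=%#lx -> aligned=%#lx offset=%zu count=%zu\n",
	       (unsigned long)dst, (unsigned long)aligned, offset, count);
}

int main(void)
{
	split_write(0x1003, 12);	/* write crosses into the next doubleword */
	split_write(0x2000, 3);		/* already aligned, short write */
	return 0;
}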
|
/arch/ia64/kernel/ |
D | ftrace.c |
      21   static unsigned char __attribute__((aligned(8)))
      59   static unsigned char __attribute__((aligned(8)))
      129  unsigned char __attribute__((aligned(8))) replaced[MCOUNT_INSN_SIZE];   in ftrace_make_nop_check()
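
The ia64 ftrace code keeps its expected and replacement instruction bytes in 8-byte-aligned buffers and compares them before patching. A rough sketch of that compare-then-replace pattern, with invented byte values and an invented patch_site() helper; the real code patches live kernel text, which this does not attempt:

#include <stdio.h>
#include <string.h>

#define INSN_SIZE 8	/* assumed size; not the real MCOUNT_INSN_SIZE */

/* The bytes we expect to find at the patch site, kept 8-byte aligned. */
static unsigned char __attribute__((aligned(8))) expected[INSN_SIZE] =
	{ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 };

static int patch_site(unsigned char *site, const unsigned char *new_insn)
{
	/* Refuse to patch if the site does not hold what we expect. */
	if (memcmp(site, expected, INSN_SIZE) != 0)
		return -1;
	memcpy(site, new_insn, INSN_SIZE);
	return 0;
}

int main(void)
{
	unsigned char __attribute__((aligned(8))) site[INSN_SIZE] =
		{ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 };
	static const unsigned char __attribute__((aligned(8))) nop[INSN_SIZE] = { 0 };

	printf("patch %s\n", patch_site(site, nop) == 0 ? "applied" : "refused");
	return 0;
}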
|
/arch/sh/lib/ |
D | __clear_user.S |
      17  ! r4..(r4+31)&~32          -------- not aligned  [ Area 0 ]
      18  ! (r4+31)&~32..(r4+r5)&~32 -------- aligned      [ Area 1 ]
      19  ! (r4+r5)&~32..r4+r5       -------- not aligned  [ Area 2 ]
|
D | memset-sh4.S |
      24  bt/s 2f ! It's aligned
      51  bt/s 11f ! dst is already aligned
      60  11: ! dst is 32byte aligned
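
The __clear_user.S comment above splits [r4, r4+r5) into an unaligned head, a 32-byte-aligned body, and an unaligned tail. The boundary arithmetic as a stand-alone sketch; masking with ~31 clears the low five bits, the example addresses are arbitrary, and for very short buffers the body is empty:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uintptr_t start = 0x1005;	/* example values only */
	uintptr_t len   = 100;
	uintptr_t end   = start + len;

	uintptr_t area1 = (start + 31) & ~(uintptr_t)31;	/* first 32-byte boundary >= start */
	uintptr_t area2 = end & ~(uintptr_t)31;			/* last 32-byte boundary <= end */

	printf("head : %#lx .. %#lx (not aligned)\n", (unsigned long)start, (unsigned long)area1);
	printf("body : %#lx .. %#lx (32-byte aligned)\n", (unsigned long)area1, (unsigned long)area2);
	printf("tail : %#lx .. %#lx (not aligned)\n", (unsigned long)area2, (unsigned long)end);
	return 0;
}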
|
/arch/alpha/lib/ |
D | memset.S |
      53  beq $3,aligned /* .. E1 (note EV5 zero-latency forwarding) */
      71  aligned:                                                        label
|
/arch/sh/drivers/dma/ |
D | dma-g2.c |
      30  } __attribute__ ((aligned(32)));
      37  } __attribute__ ((aligned(16)));
      46  } __attribute__ ((aligned(256)));
|
/arch/ia64/include/uapi/asm/ |
D | statfs.h | 17 #define ARCH_PACK_COMPAT_STATFS64 __attribute__((packed,aligned(4)))
|
/arch/arm64/include/uapi/asm/ |
D | statfs.h | 20 #define ARCH_PACK_COMPAT_STATFS64 __attribute__((packed,aligned(4)))
|
/arch/arm/include/uapi/asm/ |
D | statfs.h | 10 #define ARCH_PACK_STATFS64 __attribute__((packed,aligned(4)))
|