/include/linux/byteorder/
  big_endian.h
      14  #define __constant_htonl(x) ((__force __be32)(__u32)(x))
      15  #define __constant_ntohl(x) ((__force __u32)(__be32)(x))
      16  #define __constant_htons(x) ((__force __be16)(__u16)(x))
      17  #define __constant_ntohs(x) ((__force __u16)(__be16)(x))
      18  #define __constant_cpu_to_le64(x) ((__force __le64)___constant_swab64((x)))
      19  #define __constant_le64_to_cpu(x) ___constant_swab64((__force __u64)(__le64)(x))
      20  #define __constant_cpu_to_le32(x) ((__force __le32)___constant_swab32((x)))
      21  #define __constant_le32_to_cpu(x) ___constant_swab32((__force __u32)(__le32)(x))
      22  #define __constant_cpu_to_le16(x) ((__force __le16)___constant_swab16((x)))
      23  #define __constant_le16_to_cpu(x) ___constant_swab16((__force __u16)(__le16)(x))
      [all …]
  little_endian.h
      14  #define __constant_htonl(x) ((__force __be32)___constant_swab32((x)))
      15  #define __constant_ntohl(x) ___constant_swab32((__force __be32)(x))
      16  #define __constant_htons(x) ((__force __be16)___constant_swab16((x)))
      17  #define __constant_ntohs(x) ___constant_swab16((__force __be16)(x))
      18  #define __constant_cpu_to_le64(x) ((__force __le64)(__u64)(x))
      19  #define __constant_le64_to_cpu(x) ((__force __u64)(__le64)(x))
      20  #define __constant_cpu_to_le32(x) ((__force __le32)(__u32)(x))
      21  #define __constant_le32_to_cpu(x) ((__force __u32)(__le32)(x))
      22  #define __constant_cpu_to_le16(x) ((__force __le16)(__u16)(x))
      23  #define __constant_le16_to_cpu(x) ((__force __u16)(__le16)(x))
      [all …]
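Both flavours above do the same job: a __force cast lets a plain integer constant be treated as the __bitwise big/little-endian type (or converted back) without a sparse warning; the only difference is which direction also needs a byte swap. A minimal userland sketch of the little-endian htonl/ntohl pair follows; the stand-in type name, the no-op annotation macros and the const_swab32() helper are assumptions, since the real definitions live in the kernel's compiler and swab headers.

    #include <stdint.h>
    #include <stdio.h>

    /* No-op stand-ins for the kernel's sparse annotations: under sparse
     * (__CHECKER__) these are real attributes, otherwise they vanish. */
    #ifdef __CHECKER__
    #define __bitwise __attribute__((bitwise))
    #define __force   __attribute__((force))
    #else
    #define __bitwise
    #define __force
    #endif

    typedef uint32_t __bitwise be32_t;   /* hypothetical stand-in for __be32 */

    /* compile-time 32-bit byte swap, in the spirit of ___constant_swab32() */
    #define const_swab32(x) ((uint32_t)(                \
            (((uint32_t)(x) & 0x000000ffU) << 24) |     \
            (((uint32_t)(x) & 0x0000ff00U) <<  8) |     \
            (((uint32_t)(x) & 0x00ff0000U) >>  8) |     \
            (((uint32_t)(x) & 0xff000000U) >> 24)))

    /* little-endian host: host-to-network order needs a swap, and __force
     * "blesses" the swapped value as the bitwise type (and strips it again) */
    #define const_htonl(x)  ((__force be32_t)const_swab32((x)))
    #define const_ntohl(x)  const_swab32((__force uint32_t)(be32_t)(x))

    int main(void)
    {
        be32_t wire = const_htonl(0x12345678U);

        printf("wire = 0x%08x\n", (__force uint32_t)wire);   /* 0x78563412 */
        printf("host = 0x%08x\n", const_ntohl(wire));        /* 0x12345678 */
        return 0;
    }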
/include/net/
  ip6_checksum.h
      46  __u32 sum = (__force u32)csum;  in csum_ipv6_magic()
      48  sum += (__force u32)saddr->s6_addr32[0];  in csum_ipv6_magic()
      49  carry = (sum < (__force u32)saddr->s6_addr32[0]);  in csum_ipv6_magic()
      52  sum += (__force u32)saddr->s6_addr32[1];  in csum_ipv6_magic()
      53  carry = (sum < (__force u32)saddr->s6_addr32[1]);  in csum_ipv6_magic()
      56  sum += (__force u32)saddr->s6_addr32[2];  in csum_ipv6_magic()
      57  carry = (sum < (__force u32)saddr->s6_addr32[2]);  in csum_ipv6_magic()
      60  sum += (__force u32)saddr->s6_addr32[3];  in csum_ipv6_magic()
      61  carry = (sum < (__force u32)saddr->s6_addr32[3]);  in csum_ipv6_magic()
      64  sum += (__force u32)daddr->s6_addr32[0];  in csum_ipv6_magic()
      [all …]
  checksum.h
      56  return (__force __wsum)-1; /* invalid checksum */  in csum_and_copy_to_user()
      62  u32 res = (__force u32)csum;  in csum_add()
      63  res += (__force u32)addend;  in csum_add()
      64  return (__force __wsum)(res + (res < (__force u32)addend));  in csum_add()
      75  u32 sum = (__force u32)csum2;  in csum_block_add()
      78  return csum_add(csum, (__force __wsum)sum);  in csum_block_add()
      84  u32 sum = (__force u32)csum2;  in csum_block_sub()
      87  return csum_sub(csum, (__force __wsum)sum);  in csum_block_sub()
      92  return (__force __wsum)n;  in csum_unfold()
      95  #define CSUM_MANGLED_0 ((__force __sum16)0xffff)
      [all …]
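The csum_add() lines above (62-64) are ones'-complement addition: the 32-bit sum is allowed to wrap and the carry-out is folded back in through the (res < addend) comparison, with __force moving the value between __wsum and plain u32. A self-contained sketch of that arithmetic, using a hypothetical wsum_t in place of __wsum:

    #include <stdint.h>
    #include <stdio.h>

    #ifdef __CHECKER__
    #define __bitwise __attribute__((bitwise))
    #define __force   __attribute__((force))
    #else
    #define __bitwise
    #define __force
    #endif

    typedef uint32_t __bitwise wsum_t;   /* stand-in for __wsum */

    static inline wsum_t csum_add(wsum_t csum, wsum_t addend)
    {
        uint32_t res = (__force uint32_t)csum;

        res += (__force uint32_t)addend;
        /* if the addition wrapped, res is now smaller than addend;
         * fold the carry back in, as ones'-complement sums require */
        return (__force wsum_t)(res + (res < (__force uint32_t)addend));
    }

    int main(void)
    {
        wsum_t a = (__force wsum_t)0xffffffffU;
        wsum_t b = (__force wsum_t)0x00000002U;

        /* 0xffffffff + 0x2 wraps to 0x1; the folded carry makes it 0x2 */
        printf("0x%08x\n", (__force uint32_t)csum_add(a, b));
        return 0;
    }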
  inet_ecn.h
      74  u32 check = (__force u32)iph->check;  in IP_ECN_set_ce()
      92  check += (__force u16)htons(0xFFFB) + (__force u16)htons(ecn);  in IP_ECN_set_ce()
      94  iph->check = (__force __sum16)(check + (check>=0xFFFF));  in IP_ECN_set_ce()
  inet_hashtables.h
      283  ((__force __portpair)(((__force __u32)(__be16)(__sport) << 16) | (__u32)(__dport)))
      286  ((__force __portpair)(((__u32)(__dport) << 16) | (__force __u32)(__be16)(__sport)))
      293  const __addrpair __name = (__force __addrpair) ( \
      294  (((__force __u64)(__be32)(__saddr)) << 32) | \
      295  ((__force __u64)(__be32)(__daddr)));
      298  const __addrpair __name = (__force __addrpair) ( \
      299  (((__force __u64)(__be32)(__daddr)) << 32) | \
      300  ((__force __u64)(__be32)(__saddr)));
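The macros above build a single wide key out of two ports (and, below that, two IPv4 addresses) so an established-socket lookup can compare both with one load; the two variants exist because the packing order depends on host endianness, and __force is what permits shifting the big-endian halves around as plain integers. A rough userland sketch of the port-pair case; the macro and type names, and the choice of the big-endian-host layout, are assumptions:

    #include <stdint.h>
    #include <stdio.h>

    #ifdef __CHECKER__
    #define __bitwise __attribute__((bitwise))
    #define __force   __attribute__((force))
    #else
    #define __bitwise
    #define __force
    #endif

    typedef uint16_t __bitwise be16_t;      /* stand-in for __be16     */
    typedef uint32_t __bitwise portpair_t;  /* stand-in for __portpair */

    /* big-endian-host layout: wire-order source port in the high half,
     * host-order destination port in the low half */
    #define COMBINE_PORTS(sport, dport)                                   \
        ((__force portpair_t)(((__force uint32_t)(be16_t)(sport) << 16) | \
                              (uint32_t)(dport)))

    int main(void)
    {
        be16_t sport = (__force be16_t)0x1f90;   /* already in wire order */
        portpair_t key = COMBINE_PORTS(sport, 80);

        /* one 32-bit compare now checks both ports of a connection */
        printf("key = 0x%08x\n", (__force uint32_t)key);   /* 0x1f900050 */
        return 0;
    }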
  dsfield.h
      30  __u32 check = ntohs((__force __be16)iph->check);  in ipv4_change_dsfield()
      38  iph->check = (__force __sum16)htons(check);  in ipv4_change_dsfield()
/include/sound/
  asound.h
      169  #define SNDRV_PCM_ACCESS_MMAP_INTERLEAVED ((__force snd_pcm_access_t) 0) /* interleaved mmap */
      170  #define SNDRV_PCM_ACCESS_MMAP_NONINTERLEAVED ((__force snd_pcm_access_t) 1) /* noninterleaved mmap …
      171  #define SNDRV_PCM_ACCESS_MMAP_COMPLEX ((__force snd_pcm_access_t) 2) /* complex mmap */
      172  #define SNDRV_PCM_ACCESS_RW_INTERLEAVED ((__force snd_pcm_access_t) 3) /* readi/writei */
      173  #define SNDRV_PCM_ACCESS_RW_NONINTERLEAVED ((__force snd_pcm_access_t) 4) /* readn/writen */
      177  #define SNDRV_PCM_FORMAT_S8 ((__force snd_pcm_format_t) 0)
      178  #define SNDRV_PCM_FORMAT_U8 ((__force snd_pcm_format_t) 1)
      179  #define SNDRV_PCM_FORMAT_S16_LE ((__force snd_pcm_format_t) 2)
      180  #define SNDRV_PCM_FORMAT_S16_BE ((__force snd_pcm_format_t) 3)
      181  #define SNDRV_PCM_FORMAT_U16_LE ((__force snd_pcm_format_t) 4)
      [all …]
  core.h
      51  #define SNDRV_DEV_TOPLEVEL ((__force snd_device_type_t) 0)
      52  #define SNDRV_DEV_CONTROL ((__force snd_device_type_t) 1)
      53  #define SNDRV_DEV_LOWLEVEL_PRE ((__force snd_device_type_t) 2)
      54  #define SNDRV_DEV_LOWLEVEL_NORMAL ((__force snd_device_type_t) 0x1000)
      55  #define SNDRV_DEV_PCM ((__force snd_device_type_t) 0x1001)
      56  #define SNDRV_DEV_RAWMIDI ((__force snd_device_type_t) 0x1002)
      57  #define SNDRV_DEV_TIMER ((__force snd_device_type_t) 0x1003)
      58  #define SNDRV_DEV_SEQUENCER ((__force snd_device_type_t) 0x1004)
      59  #define SNDRV_DEV_HWDEP ((__force snd_device_type_t) 0x1005)
      60  #define SNDRV_DEV_INFO ((__force snd_device_type_t) 0x1006)
      [all …]
/include/linux/unaligned/
  generic.h
      10  #define __get_unaligned_le(ptr) ((__force typeof(*(ptr)))({ \
      18  #define __get_unaligned_be(ptr) ((__force typeof(*(ptr)))({ \
      30  *(u8 *)__gu_p = (__force u8)(val); \
      33  put_unaligned_le16((__force u16)(val), __gu_p); \
      36  put_unaligned_le32((__force u32)(val), __gu_p); \
      39  put_unaligned_le64((__force u64)(val), __gu_p); \
      51  *(u8 *)__gu_p = (__force u8)(val); \
      54  put_unaligned_be16((__force u16)(val), __gu_p); \
      57  put_unaligned_be32((__force u32)(val), __gu_p); \
      60  put_unaligned_be64((__force u64)(val), __gu_p); \
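The put-side helpers above dispatch on sizeof(val) and use __force so that an endian-annotated value (__le32, __be64, …) can be handed to the plain-integer byte-store helpers. A trimmed userland sketch of the little-endian store path; the helper and macro names here are made up for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #ifdef __CHECKER__
    #define __force __attribute__((force))
    #else
    #define __force
    #endif

    /* byte-by-byte little-endian stores; safe at any alignment */
    static inline void store_le16(uint16_t v, void *p)
    {
        uint8_t *b = p;

        b[0] = v & 0xff;   /* least-significant byte first */
        b[1] = v >> 8;
    }

    static inline void store_le32(uint32_t v, void *p)
    {
        store_le16(v & 0xffff, p);
        store_le16(v >> 16, (uint8_t *)p + 2);
    }

    /* dispatch on the value's size, in the spirit of the kernel macro */
    #define put_unaligned_le(val, ptr) do {                 \
        void *gu_p = (ptr);                                 \
        switch (sizeof(val)) {                              \
        case 1:                                             \
            *(uint8_t *)gu_p = (__force uint8_t)(val);      \
            break;                                          \
        case 2:                                             \
            store_le16((__force uint16_t)(val), gu_p);      \
            break;                                          \
        case 4:                                             \
            store_le32((__force uint32_t)(val), gu_p);      \
            break;                                          \
        }                                                   \
    } while (0)

    int main(void)
    {
        uint8_t buf[8] = { 0 };

        put_unaligned_le((uint32_t)0x11223344, buf + 1);   /* deliberately misaligned */
        printf("%02x %02x %02x %02x\n", buf[1], buf[2], buf[3], buf[4]);   /* 44 33 22 11 */
        return 0;
    }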
/include/linux/
  serial_core.h
      343  #define UPF_FOURPORT ((__force upf_t) (1 << 1))
      344  #define UPF_SAK ((__force upf_t) (1 << 2))
      345  #define UPF_SPD_MASK ((__force upf_t) (0x1030))
      346  #define UPF_SPD_HI ((__force upf_t) (0x0010))
      347  #define UPF_SPD_VHI ((__force upf_t) (0x0020))
      348  #define UPF_SPD_CUST ((__force upf_t) (0x0030))
      349  #define UPF_SPD_SHI ((__force upf_t) (0x1000))
      350  #define UPF_SPD_WARP ((__force upf_t) (0x1010))
      351  #define UPF_SKIP_TEST ((__force upf_t) (1 << 6))
      352  #define UPF_AUTO_IRQ ((__force upf_t) (1 << 7))
      [all …]
  gfp.h
      50  #define __GFP_DMA ((__force gfp_t)___GFP_DMA)
      51  #define __GFP_HIGHMEM ((__force gfp_t)___GFP_HIGHMEM)
      52  #define __GFP_DMA32 ((__force gfp_t)___GFP_DMA32)
      53  #define __GFP_MOVABLE ((__force gfp_t)___GFP_MOVABLE) /* Page is movable */
      70  #define __GFP_WAIT ((__force gfp_t)___GFP_WAIT) /* Can wait and reschedule? */
      71  #define __GFP_HIGH ((__force gfp_t)___GFP_HIGH) /* Should access emergency pools? */
      72  #define __GFP_IO ((__force gfp_t)___GFP_IO) /* Can start physical IO? */
      73  #define __GFP_FS ((__force gfp_t)___GFP_FS) /* Can call down to low-level FS? */
      74  #define __GFP_COLD ((__force gfp_t)___GFP_COLD) /* Cache-cold page required */
      75  #define __GFP_NOWARN ((__force gfp_t)___GFP_NOWARN) /* Suppress page allocation failure warning */
      [all …]
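gfp_t is a __bitwise type, so each flag macro above wraps its plain ___GFP_* bit in a __force cast; sparse then accepts ORing and masking gfp_t values while still flagging accidental mixing with untyped integers. A small sketch of the same pattern with invented flag names:

    #include <stdio.h>

    #ifdef __CHECKER__
    #define __bitwise __attribute__((bitwise))
    #define __force   __attribute__((force))
    #else
    #define __bitwise
    #define __force
    #endif

    typedef unsigned int __bitwise gfp_flags_t;   /* stand-in for gfp_t */

    /* each flag is a plain bit "blessed" into the typed flag space */
    #define FLAG_DMA     ((__force gfp_flags_t)0x01u)
    #define FLAG_HIGHMEM ((__force gfp_flags_t)0x02u)
    #define FLAG_NOWARN  ((__force gfp_flags_t)0x200u)

    static void show(gfp_flags_t flags)
    {
        /* going back to a plain integer (e.g. for printing) needs __force too */
        printf("flags = 0x%x\n", (__force unsigned int)flags);
    }

    int main(void)
    {
        /* bitwise ops between gfp_flags_t values are fine under sparse */
        show(FLAG_DMA | FLAG_NOWARN);   /* flags = 0x201 */
        return 0;
    }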
  mroute6.h
      217  #define MFC6_HASH(a, g) (((__force u32)(a)->s6_addr32[0] ^ \
      218  (__force u32)(a)->s6_addr32[1] ^ \
      219  (__force u32)(a)->s6_addr32[2] ^ \
      220  (__force u32)(a)->s6_addr32[3] ^ \
      221  (__force u32)(g)->s6_addr32[0] ^ \
      222  (__force u32)(g)->s6_addr32[1] ^ \
      223  (__force u32)(g)->s6_addr32[2] ^ \
      224  (__force u32)(g)->s6_addr32[3]) % MFC6_LINES)
  io-mapping.h
      130  return (struct io_mapping __force *) ioremap_wc(base, size);  in io_mapping_create_wc()
      136  iounmap((void __force __iomem *) mapping);  in io_mapping_free()
      145  return ((char __force __iomem *) mapping) + offset;  in io_mapping_map_atomic_wc()
      158  return ((char __force __iomem *) mapping) + offset;  in io_mapping_map_wc()
  rculist_nulls.h
      41  (*((struct hlist_nulls_node __rcu __force **)&(head)->first))
      44  (*((struct hlist_nulls_node __rcu __force **)&(node)->next))
  rcupdate.h
      478  typeof(*p) *_________p1 = (typeof(*p)*__force )ACCESS_ONCE(p); \
      480  ((typeof(*p) __force __kernel *)(_________p1)); \
      484  typeof(*p) *_________p1 = (typeof(*p)*__force )ACCESS_ONCE(p); \
      489  ((typeof(*p) __force __kernel *)(_________p1)); \
      496  ((typeof(*p) __force __kernel *)(p)); \
      517  (p) = (typeof(*v) __force space *)(v); \
      905  p = (typeof(*v) __force __rcu *)(v)
  mroute.h
      226  #define MFC_HASH(a,b) (((((__force u32)(__be32)a)>>24)^(((__force u32)(__be32)b)>>26))&(MFC_LINES-1…
      228  #define MFC_HASH(a,b) ((((__force u32)(__be32)a)^(((__force u32)(__be32)b)>>2))&(MFC_LINES-1))
  suspend.h
      36  #define PM_SUSPEND_ON ((__force suspend_state_t) 0)
      37  #define PM_SUSPEND_STANDBY ((__force suspend_state_t) 1)
      38  #define PM_SUSPEND_MEM ((__force suspend_state_t) 3)
      39  #define PM_SUSPEND_MAX ((__force suspend_state_t) 4)
/include/asm-generic/
  uaccess.h
      84  *(u8 *)to = *(u8 __force *)from;  in __copy_from_user()
      87  *(u16 *)to = *(u16 __force *)from;  in __copy_from_user()
      90  *(u32 *)to = *(u32 __force *)from;  in __copy_from_user()
      94  *(u64 *)to = *(u64 __force *)from;  in __copy_from_user()
      102  memcpy(to, (const void __force *)from, n);  in __copy_from_user()
      114  *(u8 __force *)to = *(u8 *)from;  in __copy_to_user()
      117  *(u16 __force *)to = *(u16 *)from;  in __copy_to_user()
      120  *(u32 __force *)to = *(u32 *)from;  in __copy_to_user()
      124  *(u64 __force *)to = *(u64 *)from;  in __copy_to_user()
      132  memcpy((void __force *)to, from, n);  in __copy_to_user()
      [all …]
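In the __copy_from_user()/__copy_to_user() bodies above, small fixed sizes are copied by casting the __user pointer to an ordinary kernel pointer with __force and dereferencing it, with memcpy() as the fallback. A userland approximation of the copy-in path (no access checks or fault handling; the __user stand-in below is an assumption):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #ifdef __CHECKER__
    #define __user  __attribute__((noderef, address_space(1)))
    #define __force __attribute__((force))
    #else
    #define __user
    #define __force
    #endif

    /* toy copy-in: the real helper also handles 8-byte loads and relies on
     * the fault machinery instead of trusting the pointer */
    static size_t copy_in_sketch(void *to, const void __user *from, size_t n)
    {
        switch (n) {
        case 1:
            *(uint8_t *)to = *(const uint8_t __force *)from;
            return 0;
        case 2:
            *(uint16_t *)to = *(const uint16_t __force *)from;
            return 0;
        case 4:
            *(uint32_t *)to = *(const uint32_t __force *)from;
            return 0;
        default:
            memcpy(to, (const void __force *)from, n);
            return 0;
        }
    }

    int main(void)
    {
        uint32_t src = 0xdeadbeef, dst = 0;

        copy_in_sketch(&dst, (const void __user *)&src, sizeof(dst));
        printf("0x%08x\n", dst);   /* 0xdeadbeef */
        return 0;
    }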
  signal-defs.h
      23  #define SIG_DFL ((__force __sighandler_t)0) /* default signal handling */
      24  #define SIG_IGN ((__force __sighandler_t)1) /* ignore signal */
      25  #define SIG_ERR ((__force __sighandler_t)-1) /* error return from signal */
  cputime.h
      10  #define cputime_to_jiffies(__ct) (__force unsigned long)(__ct)
      12  #define jiffies_to_cputime(__hz) (__force cputime_t)(__hz)
      16  #define cputime64_to_jiffies64(__ct) (__force u64)(__ct)
      17  #define jiffies64_to_cputime64(__jif) (__force cputime64_t)(__jif)
  io.h
      38  return *(const volatile u8 __force *) addr;  in __raw_readb()
      45  return *(const volatile u16 __force *) addr;  in __raw_readw()
      52  return *(const volatile u32 __force *) addr;  in __raw_readl()
      63  *(volatile u8 __force *) addr = b;  in __raw_writeb()
      70  *(volatile u16 __force *) addr = b;  in __raw_writew()
      77  *(volatile u32 __force *) addr = b;  in __raw_writel()
      88  return *(const volatile u64 __force *) addr;  in __raw_readq()
      94  *(volatile u64 __force *) addr = b;  in __raw_writeq()
      285  #define __io_virt(x) ((void __force *) (x))
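The __raw_read*/__raw_write* helpers above are plain volatile loads and stores, with __force stripping the __iomem address-space annotation for the dereference. A host-side sketch that substitutes an ordinary variable for the ioremap()ed register window:

    #include <stdint.h>
    #include <stdio.h>

    #ifdef __CHECKER__
    #define __iomem __attribute__((noderef, address_space(2)))
    #define __force __attribute__((force))
    #else
    #define __iomem
    #define __force
    #endif

    static inline uint32_t raw_readl(const volatile void __iomem *addr)
    {
        /* __force drops the __iomem annotation so the dereference is legal */
        return *(const volatile uint32_t __force *)addr;
    }

    static inline void raw_writel(uint32_t val, volatile void __iomem *addr)
    {
        *(volatile uint32_t __force *)addr = val;
    }

    int main(void)
    {
        /* an ordinary variable standing in for a memory-mapped register */
        uint32_t fake_reg = 0;
        volatile void __iomem *reg = (volatile void __iomem *)&fake_reg;

        raw_writel(0x1234abcd, reg);
        printf("0x%08x\n", raw_readl(reg));   /* 0x1234abcd */
        return 0;
    }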
  checksum.h
      52  u32 sum = (__force u32)csum;  in csum_fold()
      55  return (__force __sum16)~sum;  in csum_fold()
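csum_fold() compresses a 32-bit running checksum into the final 16-bit ones'-complement value: fold the high halfword into the low one twice (the second add absorbs any carry produced by the first), then invert. A standalone sketch with stand-in types for __wsum and __sum16:

    #include <stdint.h>
    #include <stdio.h>

    #ifdef __CHECKER__
    #define __bitwise __attribute__((bitwise))
    #define __force   __attribute__((force))
    #else
    #define __bitwise
    #define __force
    #endif

    typedef uint32_t __bitwise wsum_t;    /* stand-in for __wsum  */
    typedef uint16_t __bitwise sum16_t;   /* stand-in for __sum16 */

    static inline sum16_t csum_fold(wsum_t csum)
    {
        uint32_t sum = (__force uint32_t)csum;

        sum = (sum & 0xffff) + (sum >> 16);   /* fold the high halfword in  */
        sum = (sum & 0xffff) + (sum >> 16);   /* absorb any carry from that */
        return (__force sum16_t)~sum;
    }

    int main(void)
    {
        wsum_t partial = (__force wsum_t)0x00011110U;

        /* 0x1110 + 0x0001 = 0x1111, inverted -> 0xeeee */
        printf("0x%04x\n", (unsigned)(__force uint16_t)csum_fold(partial));
        return 0;
    }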
  percpu.h
      47  RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
      79  (typeof(*(__p)) __kernel __force *)(__p); \
/include/linux/mlx4/
  doorbell.h
      79  __raw_writel((__force u32) val[0], dest);  in mlx4_write64()
      80  __raw_writel((__force u32) val[1], dest + 4);  in mlx4_write64()