Searched refs:cpu_to_be64 (Results 1 – 19 of 19) sorted by relevance
191 r->b = cpu_to_be64((b >> 1) | (a << 63)); in gf128mul_x_lle()
192 r->a = cpu_to_be64((a >> 1) ^ _tt); in gf128mul_x_lle()
203 r->a = cpu_to_be64((a << 1) | (b >> 63)); in gf128mul_x_bbe()
204 r->b = cpu_to_be64((b << 1) ^ _tt); in gf128mul_x_bbe()
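The gf128mul_x_lle()/gf128mul_x_bbe() hits above store each half of a 128-bit GF(2^128) element back in big-endian order after the shift. A minimal userspace sketch of the bbe doubling step, assuming htobe64()/be64toh() from <endian.h> as stand-ins for cpu_to_be64()/be64_to_cpu() and a simplified be128 struct (both are illustrative, not the kernel definitions):

    /* Illustrative sketch, not the kernel code: double a GF(2^128) element
     * stored big-endian (bbe convention).  a holds the high 64 bits, b the
     * low 64 bits; the bit shifted out of a selects the reduction constant
     * 0x87 (polynomial x^128 + x^7 + x^2 + x + 1). */
    #include <endian.h>
    #include <stdint.h>

    struct be128_sketch { uint64_t a, b; };	/* both fields big-endian */

    static void gf128mul_x_bbe_sketch(struct be128_sketch *r,
                                      const struct be128_sketch *x)
    {
            uint64_t a = be64toh(x->a);
            uint64_t b = be64toh(x->b);
            uint64_t tt = (a >> 63) ? 0x87 : 0;

            r->a = htobe64((a << 1) | (b >> 63));
            r->b = htobe64((b << 1) ^ tt);
    }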
113 bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61); in sha512_base_do_finalize()
114 bits[1] = cpu_to_be64(sctx->count[0] << 3); in sha512_base_do_finalize()
93 *bits = cpu_to_be64(sctx->count << 3); in sm3_base_do_finalize()
93 *bits = cpu_to_be64(sctx->count << 3); in sha256_base_do_finalize()
90 *bits = cpu_to_be64(sctx->count << 3); in sha1_base_do_finalize()
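The sha512/sm3/sha256/sha1 finalize hits above all write the total message length, in bits, as a big-endian value into the tail of the final block. A sketch of that step for a 64-bit count (simplified; store_bitlen_be() and the fixed 64-byte block are assumptions, not kernel helpers):

    /* Sketch, not the kernel helper: store the message length in bits
     * big-endian in the last 8 bytes of the final 64-byte block, as
     * SHA-1/SHA-256/SM3 padding requires. */
    #include <endian.h>
    #include <stdint.h>
    #include <string.h>

    static void store_bitlen_be(uint8_t block[64], uint64_t byte_count)
    {
            uint64_t bits = htobe64(byte_count << 3);	/* bytes -> bits */

            memcpy(block + 64 - sizeof(bits), &bits, sizeof(bits));
    }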
476 return ((p[0] ^ cpu_to_be64(0xff02000000000000UL)) | (p[1] ^ cpu_to_be64(1))) == 0UL; in ipv6_addr_is_ll_all_nodes()
488 return ((p[0] ^ cpu_to_be64(0xff02000000000000UL)) | (p[1] ^ cpu_to_be64(2))) == 0UL; in ipv6_addr_is_ll_all_routers()
505 return ((p[0] ^ cpu_to_be64(0xff02000000000000UL)) | in ipv6_addr_is_solict_mult()
506 ((p[1] ^ cpu_to_be64(0x00000001ff000000UL)) & in ipv6_addr_is_solict_mult()
507 cpu_to_be64(0xffffffffff000000UL))) == 0UL; in ipv6_addr_is_solict_mult()
521 return ((p[0] ^ cpu_to_be64(0xff02000000000000UL)) | in ipv6_addr_is_all_snoopers()
522 (p[1] ^ cpu_to_be64(0x6a))) == 0UL; in ipv6_addr_is_all_snoopers()
597 if (len && ((*a1 ^ *a2) & cpu_to_be64((~0UL) << (64 - len)))) in __ipv6_prefix_equal64_half()
676 return (be[0] | (be[1] ^ cpu_to_be64(1))) == 0UL; in ipv6_addr_loopback()
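The IPv6 hits above compare an address as two 64-bit big-endian words against cpu_to_be64() constants, so the constants are byte-swapped at compile time and the runtime check is only XOR/OR. A userspace sketch of the same trick for the loopback address ::1 (ipv6_is_loopback_sketch() and the raw 16-byte buffer are assumptions, not the kernel code):

    /* Sketch of the two-word big-endian compare used above: ::1 is
     * 15 zero bytes followed by 0x01, i.e. words 0 and htobe64(1). */
    #include <endian.h>
    #include <stdint.h>
    #include <string.h>

    static int ipv6_is_loopback_sketch(const uint8_t addr[16])
    {
            uint64_t w[2];

            memcpy(w, addr, sizeof(w));
            return (w[0] | (w[1] ^ htobe64(1))) == 0;
    }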
508 *p = cpu_to_be64(rcd_sn - n); in tls_bigint_subtract()
328 #define IB_SERVICE_ID_AGN_MASK cpu_to_be64(0xFF00000000000000ULL)
329 #define IB_CM_ASSIGN_SERVICE_ID cpu_to_be64(0x0200000000000000ULL)
330 #define IB_CMA_SERVICE_ID cpu_to_be64(0x0000000001000000ULL)
331 #define IB_CMA_SERVICE_ID_MASK cpu_to_be64(0xFFFFFFFFFF000000ULL)
332 #define IB_SDP_SERVICE_ID cpu_to_be64(0x0000000000010000ULL)
333 #define IB_SDP_SERVICE_ID_MASK cpu_to_be64(0xFFFFFFFFFFFF0000ULL)
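The InfiniBand service-ID macros above are converted to big-endian at compile time, so an on-the-wire service ID can be classified with a plain mask and compare. A sketch of that usage (the *_SKETCH names are assumptions, not part of the RDMA headers; htobe64() stands in for cpu_to_be64()):

    /* Sketch: classify a big-endian service ID without a runtime swap;
     * the constants are already in network byte order. */
    #include <endian.h>
    #include <stdint.h>

    #define CMA_SERVICE_ID_SKETCH	htobe64(0x0000000001000000ULL)
    #define CMA_SERVICE_MASK_SKETCH	htobe64(0xFFFFFFFFFF000000ULL)

    static int is_cma_service_id_sketch(uint64_t service_id_be)
    {
            return (service_id_be & CMA_SERVICE_MASK_SKETCH) ==
                   CMA_SERVICE_ID_SKETCH;
    }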
12 #define OPA_MAKE_ID(x) (cpu_to_be64(OPA_SPECIAL_OUI << 40 | (x)))
53 put_unaligned(cpu_to_be64((_iba_get64(ptr) & ~mask) | prep_value), ptr); in _iba_set64()
161 #define IB_SA_COMP_MASK(n) ((__force ib_sa_comp_mask) cpu_to_be64(1ull << (n)))
20 #define cpu_to_fdt64(x) cpu_to_be64(x)
61 return (__force __virtio64)cpu_to_be64(val); in __cpu_to_virtio64()
92 #define cpu_to_be64 __cpu_to_be64 macro
188 *var = cpu_to_be64(be64_to_cpu(*var) + val); in be64_add_cpu()
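The be64_add_cpu() hit above is the usual read-modify-write on a big-endian field: convert to CPU order, add, convert back. A userspace equivalent, assuming htobe64()/be64toh() in place of the kernel macros:

    /* Sketch of be64_add_cpu(): increment a counter that is kept in
     * big-endian (e.g. on-disk or on-wire) representation. */
    #include <endian.h>
    #include <stdint.h>

    static void be64_add_cpu_sketch(uint64_t *var_be, uint64_t val)
    {
            *var_be = htobe64(be64toh(*var_be) + val);
    }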
64 return (__force __rpmsg64)cpu_to_be64(val); in __cpu_to_rpmsg64()
82 __put_unaligned_t(__be64, cpu_to_be64(val), p); in put_unaligned_be64()
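put_unaligned_be64() above combines the byte swap with an alignment-safe store. A simplified userspace sketch using memcpy() for the unaligned write (put_unaligned_be64_sketch() is an assumption, not the kernel implementation):

    /* Sketch: swap to big-endian first, then memcpy so the store is
     * valid at any alignment. */
    #include <endian.h>
    #include <stdint.h>
    #include <string.h>

    static void put_unaligned_be64_sketch(uint64_t val, void *p)
    {
            uint64_t v = htobe64(val);

            memcpy(p, &v, sizeof(v));
    }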
24 #define htonll(x) cpu_to_be64(x)
107 *((__be64 *)(p) + __mlx5_64_off(typ, fld)) = cpu_to_be64(v); \
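The mlx5 hit above writes a CPU-order value into a device command buffer as a __be64 at a field offset computed by __mlx5_64_off(). A generic sketch of that pattern (set_be64_field_sketch() and the plain word offset are assumptions, not the mlx5 macro machinery):

    /* Sketch: store a CPU-order 64-bit value big-endian into a buffer
     * at a given 64-bit word offset, as device/firmware structures
     * expect; memcpy keeps the store alignment-safe. */
    #include <endian.h>
    #include <stdint.h>
    #include <string.h>

    static void set_be64_field_sketch(void *buf, size_t qword_off, uint64_t v)
    {
            uint64_t be = htobe64(v);

            memcpy((uint8_t *)buf + qword_off * sizeof(be), &be, sizeof(be));
    }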