
Lines Matching +full:1 +full:- +full:v0

 * SipHash: a fast short-input PRF
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.

#include <asm/word-at-a-time.h>
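Callers do not use these internals directly; they go through the helpers declared in include/linux/siphash.h. A minimal caller-side sketch, assuming that header's siphash_key_t and siphash() declarations (the bucket helper and the lazy keying below are illustrative, not part of this file):

#include <linux/siphash.h>
#include <linux/random.h>

static siphash_key_t bucket_key;

static u32 example_bucket(const void *item, size_t len, u32 nbuckets)
{
        /* Key the PRF once with random bytes so remote parties cannot
         * precompute colliding inputs for the hashtable. */
        get_random_once(&bucket_key, sizeof(bucket_key));

        /* siphash() yields a 64-bit PRF value; fold it into a bucket index. */
        return (u32)siphash(item, len, &bucket_key) % nbuckets;
}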
#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)

        u64 v0 = SIPHASH_CONST_0; \
        v3 ^= key->key[1]; \
        v2 ^= key->key[0]; \
        v1 ^= key->key[1]; \
        v0 ^= key->key[0];

        v0 ^= b; \
        return (v0 ^ v1) ^ (v2 ^ v3);
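SIPHASH_PERMUTATION and the SIPHASH_CONST_* values referenced above are defined elsewhere (include/linux/siphash.h in current kernels), not in the matched lines. The following is a hedged sketch of what they are expected to expand to, based on the published SipHash design (rotation counts 13/32, 16, 21, 17/32 and its well-known initialization constants), not a copy of the header:

#include <linux/bitops.h>       /* rol64() */
#include <linux/types.h>

/* Sketch of one SipHash ARX round; SIPHASH_PERMUTATION should be
 * equivalent to this. */
#define SIPROUND_SKETCH(v0, v1, v2, v3) do {                            \
        v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32);     \
        v2 += v3; v3 = rol64(v3, 16); v3 ^= v2;                         \
        v0 += v3; v3 = rol64(v3, 21); v3 ^= v0;                         \
        v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32);     \
} while (0)

/* Initialization constants from the SipHash paper (the bytes of
 * "somepseudorandomlygeneratedbytes"). */
#define SIPHASH_CONST_0_SKETCH  0x736f6d6570736575ULL
#define SIPHASH_CONST_1_SKETCH  0x646f72616e646f6dULL
#define SIPHASH_CONST_2_SKETCH  0x6c7967656e657261ULL
#define SIPHASH_CONST_3_SKETCH  0x7465646279746573ULL

The matched preamble and postamble fragments then fit the reference construction: the two key halves are XORed into all four lanes, each message word is XORed into v3 before the rounds and into v0 after them, and the final XOR of all four lanes produces the 64-bit result.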
/* in __siphash_aligned() */
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        v0 ^= m;
        case 1: b |= end[0];

/* in __siphash_unaligned() */
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        v0 ^= m;
        case 1: b |= end[0];
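Only a few lines of the two variable-length walkers match, but they outline the structure: an 8-byte main loop, then a fall-through switch that folds 0-7 leftover bytes into a final block whose top byte carries the length. A self-contained sketch of that shape (SipHash-2-4 over bytes, reusing SIPROUND_SKETCH and the constants above; the bytewise load stands in for le64_to_cpup() in the aligned walker and get_unaligned_le64() in the unaligned one, which is presumably their only difference):

static inline u64 le64_load_sketch(const u8 *p)
{
        u64 v = 0;
        int i;

        /* Assemble a little-endian u64 one byte at a time; portable stand-in
         * for the aligned/unaligned word loads. */
        for (i = 0; i < 8; i++)
                v |= (u64)p[i] << (8 * i);
        return v;
}

/* Illustrative walker with the same shape as the matched lines. */
static u64 siphash24_sketch(const void *data, size_t len, const u64 key[2])
{
        const u8 *p = data;
        const u8 *end = p + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        u64 v0 = SIPHASH_CONST_0_SKETCH ^ key[0];
        u64 v1 = SIPHASH_CONST_1_SKETCH ^ key[1];
        u64 v2 = SIPHASH_CONST_2_SKETCH ^ key[0];
        u64 v3 = SIPHASH_CONST_3_SKETCH ^ key[1];
        u64 m, b = (u64)len << 56;      /* length rides in the top byte */

        for (; p != end; p += sizeof(u64)) {
                m = le64_load_sketch(p);
                v3 ^= m;
                SIPROUND_SKETCH(v0, v1, v2, v3);        /* the "2" compression rounds */
                SIPROUND_SKETCH(v0, v1, v2, v3);
                v0 ^= m;
        }
        switch (left) {                 /* intentional fall-through */
        case 7: b |= (u64)end[6] << 48;
        case 6: b |= (u64)end[5] << 40;
        case 5: b |= (u64)end[4] << 32;
        case 4: b |= (u64)end[3] << 24;
        case 3: b |= (u64)end[2] << 16;
        case 2: b |= (u64)end[1] << 8;
        case 1: b |= end[0];
        }
        v3 ^= b;
        SIPROUND_SKETCH(v0, v1, v2, v3);
        SIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= b;
        v2 ^= 0xff;
        SIPROUND_SKETCH(v0, v1, v2, v3);        /* the "4" finalization rounds */
        SIPROUND_SKETCH(v0, v1, v2, v3);
        SIPROUND_SKETCH(v0, v1, v2, v3);
        SIPROUND_SKETCH(v0, v1, v2, v3);
        return (v0 ^ v1) ^ (v2 ^ v3);
}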
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
        v0 ^= first;

 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
        v0 ^= first;
        v0 ^= second;

 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
        v0 ^= first;
        v0 ^= second;
        v0 ^= third;

 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
        v0 ^= first;
        v0 ^= second;
        v0 ^= third;
        v0 ^= forth;

/* in siphash_3u32() */
        v0 ^= combined;
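These fixed-width helpers are fast paths for common small inputs. The XOR pattern in the matches fits the reference construction: each input word w is mixed as v3 ^= w, two rounds, v0 ^= w, with the total length baked into the final block as usual. The *_u32 variants, such as siphash_3u32 above, first pack pairs of u32 arguments into one 64-bit word; a small sketch of that packing (helper name is illustrative):

/* Pack two u32 inputs into one u64 so the u32 helpers can reuse the 64-bit
 * per-word mixing; sketch only. */
static inline u64 siphash_pack_2u32_sketch(const u32 first, const u32 second)
{
        return (u64)second << 32 | first;
}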
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.

        v0 ^= b; \
        return (v0 ^ v1) ^ (v2 ^ v3);
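So on 64-bit kernels the hsiphash_* entry points reuse the 64-bit SipHash permutation, just with the lighter 1-3 round schedule and a result narrowed to u32. A hedged sketch of the per-word step and finalization this implies, built on SIPROUND_SKETCH above (helper names are illustrative):

/* One compression round per word (the "1" in SipHash1-3). */
static inline void hsip64_word_sketch(u64 *v0, u64 *v1, u64 *v2, u64 *v3, u64 m)
{
        *v3 ^= m;
        SIPROUND_SKETCH(*v0, *v1, *v2, *v3);
        *v0 ^= m;
}

/* Final block, then three finalization rounds (the "3"), truncated to u32. */
static inline u32 hsip64_final_sketch(u64 v0, u64 v1, u64 v2, u64 v3, u64 b)
{
        v3 ^= b;
        SIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= b;
        v2 ^= 0xff;
        SIPROUND_SKETCH(v0, v1, v2, v3);
        SIPROUND_SKETCH(v0, v1, v2, v3);
        SIPROUND_SKETCH(v0, v1, v2, v3);
        return (u32)((v0 ^ v1) ^ (v2 ^ v3));
}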
/* in __hsiphash_aligned() */
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        v0 ^= m;
        case 1: b |= end[0];

/* in __hsiphash_unaligned() */
        const u8 *end = data + len - (len % sizeof(u64));
        const u8 left = len & (sizeof(u64) - 1);
        v0 ^= m;
        case 1: b |= end[0];
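These two walkers appear to mirror __siphash_aligned()/__siphash_unaligned() above; the visible difference is the round count per word. A minimal sketch of the main loop, reusing the helpers defined earlier (names are illustrative, and the bytewise load again stands in for the aligned/unaligned kernel loads):

/* 64-bit hsiphash main loop: same 8-byte walk, one round per word. */
static void hsip64_walk_sketch(const u8 *p, const u8 *end,
                               u64 *v0, u64 *v1, u64 *v2, u64 *v3)
{
        u64 m;

        for (; p != end; p += sizeof(u64)) {
                m = le64_load_sketch(p);
                hsip64_word_sketch(v0, v1, v2, v3, m);
        }
}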
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32

 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
        v0 ^= combined;

 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
        v0 ^= combined;

 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
        v0 ^= combined;
        v0 ^= combined;
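On 64-bit these helpers pack pairs of u32 arguments into one "combined" u64 (note the two combined XORs in hsiphash_4u32) and push each packed word through the single-round step. An end-to-end sketch of the two-word case, assembled from the sketches above; the key is treated as two u32 words per hsiphash_key_t, and all names are illustrative:

/* Presumed shape of the 64-bit hsiphash_2u32 fast path. */
static u32 hsiphash_2u32_sketch64(const u32 first, const u32 second,
                                  const u32 hkey[2])
{
        u64 v0 = SIPHASH_CONST_0_SKETCH ^ hkey[0];
        u64 v1 = SIPHASH_CONST_1_SKETCH ^ hkey[1];
        u64 v2 = SIPHASH_CONST_2_SKETCH ^ hkey[0];
        u64 v3 = SIPHASH_CONST_3_SKETCH ^ hkey[1];
        u64 b = 8ULL << 56;     /* total input length: two u32s */
        u64 combined = siphash_pack_2u32_sketch(first, second);

        hsip64_word_sketch(&v0, &v1, &v2, &v3, combined);
        return hsip64_final_sketch(v0, v1, v2, v3, b);
}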
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)

        u32 v0 = HSIPHASH_CONST_0; \
        v3 ^= key->key[1]; \
        v2 ^= key->key[0]; \
        v1 ^= key->key[1]; \
        v0 ^= key->key[0];

        v0 ^= b; \
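This is the genuine 32-bit HalfSipHash1-3 path. HSIPHASH_PERMUTATION and the HSIPHASH_CONST_* values again come from the header rather than the matched lines; a hedged sketch of what they most plausibly are, based on the published HalfSipHash design (32-bit lanes, rotations 5/16, 8, 7, 13/16, mostly-zero initialization constants), not copied from the header:

/* Sketch of one HalfSipHash ARX round on 32-bit lanes. */
#define HSIPROUND_SKETCH(v0, v1, v2, v3) do {                           \
        v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16);      \
        v2 += v3; v3 = rol32(v3, 8); v3 ^= v2;                          \
        v0 += v3; v3 = rol32(v3, 7); v3 ^= v0;                          \
        v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16);     \
} while (0)

/* HalfSipHash initialization constants from the reference code. */
#define HSIPHASH_CONST_0_SKETCH 0U
#define HSIPHASH_CONST_1_SKETCH 0U
#define HSIPHASH_CONST_2_SKETCH 0x6c796765U
#define HSIPHASH_CONST_3_SKETCH 0x74656462U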
/* in __hsiphash_aligned() */
        const u8 *end = data + len - (len % sizeof(u32));
        const u8 left = len & (sizeof(u32) - 1);
        v0 ^= m;
        case 1: b |= end[0];

/* in __hsiphash_unaligned() */
        const u8 *end = data + len - (len % sizeof(u32));
        const u8 left = len & (sizeof(u32) - 1);
        v0 ^= m;
        case 1: b |= end[0];
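The 32-bit walkers consume the input four bytes at a time, so the tail switch only covers 1 to 3 leftover bytes and the length sits in the top byte of a 32-bit final block. A self-contained sketch of that structure (HalfSipHash1-3 over bytes, reusing HSIPROUND_SKETCH and the constants above; the bytewise load stands in for le32_to_cpup()/get_unaligned_le32()):

static inline u32 le32_load_sketch(const u8 *p)
{
        return (u32)p[0] | (u32)p[1] << 8 | (u32)p[2] << 16 | (u32)p[3] << 24;
}

/* Illustrative 32-bit walker with the same shape as the matched lines. */
static u32 hsiphash13_sketch(const void *data, size_t len, const u32 hkey[2])
{
        const u8 *p = data;
        const u8 *end = p + len - (len % sizeof(u32));
        const u8 left = len & (sizeof(u32) - 1);
        u32 v0 = HSIPHASH_CONST_0_SKETCH ^ hkey[0];
        u32 v1 = HSIPHASH_CONST_1_SKETCH ^ hkey[1];
        u32 v2 = HSIPHASH_CONST_2_SKETCH ^ hkey[0];
        u32 v3 = HSIPHASH_CONST_3_SKETCH ^ hkey[1];
        u32 m, b = (u32)len << 24;      /* length in the top byte */

        for (; p != end; p += sizeof(u32)) {
                m = le32_load_sketch(p);
                v3 ^= m;
                HSIPROUND_SKETCH(v0, v1, v2, v3);       /* one round per word */
                v0 ^= m;
        }
        switch (left) {                 /* intentional fall-through */
        case 3: b |= (u32)end[2] << 16;
        case 2: b |= (u32)end[1] << 8;
        case 1: b |= end[0];
        }
        v3 ^= b;
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= b;
        v2 ^= 0xff;
        HSIPROUND_SKETCH(v0, v1, v2, v3);       /* three finalization rounds */
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        return v1 ^ v3;                 /* 32-bit HalfSipHash output */
}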
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
        v0 ^= first;

 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
        v0 ^= first;
        v0 ^= second;

 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
        v0 ^= first;
        v0 ^= second;
        v0 ^= third;

 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
        v0 ^= first;
        v0 ^= second;
        v0 ^= third;
        v0 ^= forth;
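On 32-bit, each u32 argument is already a full message word, so the fixed-width helpers feed the arguments straight through the single-round step, which is what the per-argument XORs above suggest. A hedged sketch of the two-word case, assembled from the sketches above (names are illustrative):

/* Presumed shape of the 32-bit hsiphash_2u32 fast path. */
static u32 hsiphash_2u32_sketch32(const u32 first, const u32 second,
                                  const u32 hkey[2])
{
        u32 v0 = HSIPHASH_CONST_0_SKETCH ^ hkey[0];
        u32 v1 = HSIPHASH_CONST_1_SKETCH ^ hkey[1];
        u32 v2 = HSIPHASH_CONST_2_SKETCH ^ hkey[0];
        u32 v3 = HSIPHASH_CONST_3_SKETCH ^ hkey[1];
        u32 b = 8U << 24;               /* total input length: two u32s */

        v3 ^= first;
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= first;
        v3 ^= second;
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= second;

        v3 ^= b;
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        v0 ^= b;
        v2 ^= 0xff;
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        HSIPROUND_SKETCH(v0, v1, v2, v3);
        return v1 ^ v3;
}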