/* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 *
 * This file is provided under a dual BSD/GPLv2 license.
 *
 * SipHash: a fast short-input PRF
 * https://131002.net/siphash/
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
 */

#ifndef _LINUX_SIPHASH_H
#define _LINUX_SIPHASH_H

#include <linux/types.h>
#include <linux/kernel.h>

#define SIPHASH_ALIGNMENT __alignof__(u64)
typedef struct {
	u64 key[2];
} siphash_key_t;

static inline bool siphash_key_is_zero(const siphash_key_t *key)
{
	return !(key->key[0] | key->key[1]);
}

u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key);
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key);

u64 siphash_1u64(const u64 a, const siphash_key_t *key);
u64 siphash_2u64(const u64 a, const u64 b, const siphash_key_t *key);
u64 siphash_3u64(const u64 a, const u64 b, const u64 c,
		 const siphash_key_t *key);
u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
		 const siphash_key_t *key);
u64 siphash_1u32(const u32 a, const siphash_key_t *key);
u64 siphash_3u32(const u32 a, const u32 b, const u32 c,
		 const siphash_key_t *key);

static inline u64 siphash_2u32(const u32 a, const u32 b,
			       const siphash_key_t *key)
{
	return siphash_1u64((u64)b << 32 | a, key);
}
static inline u64 siphash_4u32(const u32 a, const u32 b, const u32 c,
			       const u32 d, const siphash_key_t *key)
{
	return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
}
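/*
 * Illustrative sketch, not part of the original header: when the input is a
 * small, fixed set of integers rather than a byte buffer, the fixed-width
 * helpers above avoid the generic length handling entirely.  The function
 * and key names below are hypothetical, and addr_key is assumed to have been
 * filled with random bytes at init time.
 *
 *	static siphash_key_t addr_key;
 *
 *	static u64 hash_addr_pair(u32 saddr, u32 daddr)
 *	{
 *		return siphash_2u32(saddr, daddr, &addr_key);
 *	}
 */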


static inline u64 ___siphash_aligned(const __le64 *data, size_t len,
				     const siphash_key_t *key)
{
	if (__builtin_constant_p(len) && len == 4)
		return siphash_1u32(le32_to_cpup((const __le32 *)data), key);
	if (__builtin_constant_p(len) && len == 8)
		return siphash_1u64(le64_to_cpu(data[0]), key);
	if (__builtin_constant_p(len) && len == 16)
		return siphash_2u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    key);
	if (__builtin_constant_p(len) && len == 24)
		return siphash_3u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    le64_to_cpu(data[2]), key);
	if (__builtin_constant_p(len) && len == 32)
		return siphash_4u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
				    le64_to_cpu(data[2]), le64_to_cpu(data[3]),
				    key);
	return __siphash_aligned(data, len, key);
}

/**
 * siphash - compute 64-bit siphash PRF value
 * @data: buffer to hash
 * @len: size of @data
 * @key: the siphash key
 */
static inline u64 siphash(const void *data, size_t len,
			  const siphash_key_t *key)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    !IS_ALIGNED((unsigned long)data, SIPHASH_ALIGNMENT))
		return __siphash_unaligned(data, len, key);
	return ___siphash_aligned(data, len, key);
}
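/*
 * Illustrative sketch, not part of the original header: typical use is to
 * generate a key once (e.g. with get_random_bytes() from <linux/random.h>)
 * and then hash small, fixed-layout objects with it.  The struct, function
 * and key names below are hypothetical.
 *
 *	struct flow_id {
 *		__be32 saddr, daddr;
 *		__be16 sport, dport;
 *	};
 *
 *	static siphash_key_t flow_hash_key __read_mostly;
 *
 *	static void flow_hash_init(void)
 *	{
 *		get_random_bytes(&flow_hash_key, sizeof(flow_hash_key));
 *	}
 *
 *	static u64 flow_hash(const struct flow_id *id)
 *	{
 *		return siphash(id, sizeof(*id), &flow_hash_key);
 *	}
 */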

#define HSIPHASH_ALIGNMENT __alignof__(unsigned long)
typedef struct {
	unsigned long key[2];
} hsiphash_key_t;

u32 __hsiphash_aligned(const void *data, size_t len,
		       const hsiphash_key_t *key);
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key);

u32 hsiphash_1u32(const u32 a, const hsiphash_key_t *key);
u32 hsiphash_2u32(const u32 a, const u32 b, const hsiphash_key_t *key);
u32 hsiphash_3u32(const u32 a, const u32 b, const u32 c,
		  const hsiphash_key_t *key);
u32 hsiphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
		  const hsiphash_key_t *key);

static inline u32 ___hsiphash_aligned(const __le32 *data, size_t len,
				      const hsiphash_key_t *key)
{
	if (__builtin_constant_p(len) && len == 4)
		return hsiphash_1u32(le32_to_cpu(data[0]), key);
	if (__builtin_constant_p(len) && len == 8)
		return hsiphash_2u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     key);
	if (__builtin_constant_p(len) && len == 12)
		return hsiphash_3u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     le32_to_cpu(data[2]), key);
	if (__builtin_constant_p(len) && len == 16)
		return hsiphash_4u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
				     le32_to_cpu(data[2]), le32_to_cpu(data[3]),
				     key);
	return __hsiphash_aligned(data, len, key);
}

/**
 * hsiphash - compute 32-bit hsiphash PRF value
 * @data: buffer to hash
 * @len: size of @data
 * @key: the hsiphash key
 */
static inline u32 hsiphash(const void *data, size_t len,
			   const hsiphash_key_t *key)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    !IS_ALIGNED((unsigned long)data, HSIPHASH_ALIGNMENT))
		return __hsiphash_unaligned(data, len, key);
	return ___hsiphash_aligned(data, len, key);
}
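/*
 * Illustrative sketch, not part of the original header: hsiphash() is only
 * meant for hashtable bucket selection, never for anything security
 * sensitive.  The names below are hypothetical; TABLE_SIZE is assumed to be
 * a power of two and table_key to have been randomly keyed at init time.
 *
 *	static hsiphash_key_t table_key;
 *
 *	static u32 table_bucket(const void *member, size_t len)
 *	{
 *		return hsiphash(member, len, &table_key) & (TABLE_SIZE - 1);
 *	}
 */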

/*
 * These macros expose the raw SipHash and HalfSipHash permutations.
 * Do not use them directly! If you think you have a use for them,
 * be sure to CC the maintainer of this file explaining why.
 */

#define SIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol64((b), 13), (b) ^= (a), (a) = rol64((a), 32), \
	(c) += (d), (d) = rol64((d), 16), (d) ^= (c), \
	(a) += (d), (d) = rol64((d), 21), (d) ^= (a), \
	(c) += (b), (b) = rol64((b), 17), (b) ^= (c), (c) = rol64((c), 32))

#define SIPHASH_CONST_0 0x736f6d6570736575ULL
#define SIPHASH_CONST_1 0x646f72616e646f6dULL
#define SIPHASH_CONST_2 0x6c7967656e657261ULL
#define SIPHASH_CONST_3 0x7465646279746573ULL

#define HSIPHASH_PERMUTATION(a, b, c, d) ( \
	(a) += (b), (b) = rol32((b), 5), (b) ^= (a), (a) = rol32((a), 16), \
	(c) += (d), (d) = rol32((d), 8), (d) ^= (c), \
	(a) += (d), (d) = rol32((d), 7), (d) ^= (a), \
	(c) += (b), (b) = rol32((b), 13), (b) ^= (c), (c) = rol32((c), 16))

#define HSIPHASH_CONST_0 0U
#define HSIPHASH_CONST_1 0U
#define HSIPHASH_CONST_2 0x6c796765U
#define HSIPHASH_CONST_3 0x74656462U

#endif /* _LINUX_SIPHASH_H */