1 /* Copyright (C) 2016 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
2  *
3  * This file is provided under a dual BSD/GPLv2 license.
4  *
5  * SipHash: a fast short-input PRF
6  * https://131002.net/siphash/
7  *
8  * This implementation is specifically for SipHash2-4 for a secure PRF
9  * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
10  * hashtables.
11  */
12 
13 #ifndef _LINUX_SIPHASH_H
14 #define _LINUX_SIPHASH_H
15 
16 #include <linux/types.h>
17 #include <linux/kernel.h>
18 
/* Required alignment of the input buffer for the aligned fast path
 * (checked by siphash() before dispatching). */
#define SIPHASH_ALIGNMENT __alignof__(u64)
/* 128-bit SipHash key, stored as two 64-bit words. */
typedef struct {
	u64 key[2];
} siphash_key_t;
23 
siphash_key_is_zero(const siphash_key_t * key)24 static inline bool siphash_key_is_zero(const siphash_key_t *key)
25 {
26 	return !(key->key[0] | key->key[1]);
27 }
28 
/* Out-of-line SipHash over an arbitrary buffer; callers normally use
 * siphash() below, which picks the aligned or unaligned variant. */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key);
#endif
33 
/* Fast-path entry points for fixed-size inputs of one to four u64 words,
 * or one/three u32 words; used by ___siphash_aligned() for constant lengths. */
u64 siphash_1u64(const u64 a, const siphash_key_t *key);
u64 siphash_2u64(const u64 a, const u64 b, const siphash_key_t *key);
u64 siphash_3u64(const u64 a, const u64 b, const u64 c,
		 const siphash_key_t *key);
u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
		 const siphash_key_t *key);
u64 siphash_1u32(const u32 a, const siphash_key_t *key);
u64 siphash_3u32(const u32 a, const u32 b, const u32 c,
		 const siphash_key_t *key);
43 
siphash_2u32(const u32 a,const u32 b,const siphash_key_t * key)44 static inline u64 siphash_2u32(const u32 a, const u32 b,
45 			       const siphash_key_t *key)
46 {
47 	return siphash_1u64((u64)b << 32 | a, key);
48 }
siphash_4u32(const u32 a,const u32 b,const u32 c,const u32 d,const siphash_key_t * key)49 static inline u64 siphash_4u32(const u32 a, const u32 b, const u32 c,
50 			       const u32 d, const siphash_key_t *key)
51 {
52 	return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
53 }
54 
55 
___siphash_aligned(const __le64 * data,size_t len,const siphash_key_t * key)56 static inline u64 ___siphash_aligned(const __le64 *data, size_t len,
57 				     const siphash_key_t *key)
58 {
59 	if (__builtin_constant_p(len) && len == 4)
60 		return siphash_1u32(le32_to_cpup((const __le32 *)data), key);
61 	if (__builtin_constant_p(len) && len == 8)
62 		return siphash_1u64(le64_to_cpu(data[0]), key);
63 	if (__builtin_constant_p(len) && len == 16)
64 		return siphash_2u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
65 				    key);
66 	if (__builtin_constant_p(len) && len == 24)
67 		return siphash_3u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
68 				    le64_to_cpu(data[2]), key);
69 	if (__builtin_constant_p(len) && len == 32)
70 		return siphash_4u64(le64_to_cpu(data[0]), le64_to_cpu(data[1]),
71 				    le64_to_cpu(data[2]), le64_to_cpu(data[3]),
72 				    key);
73 	return __siphash_aligned(data, len, key);
74 }
75 
/**
 * siphash - compute 64-bit siphash PRF value
 * @data: buffer to hash
 * @len: size of @data in bytes
 * @key: the siphash key
 */
siphash(const void * data,size_t len,const siphash_key_t * key)82 static inline u64 siphash(const void *data, size_t len,
83 			  const siphash_key_t *key)
84 {
85 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
86 	if (!IS_ALIGNED((unsigned long)data, SIPHASH_ALIGNMENT))
87 		return __siphash_unaligned(data, len, key);
88 #endif
89 	return ___siphash_aligned(data, len, key);
90 }
91 
/* Required alignment of the input buffer for the hsiphash aligned fast
 * path (checked by hsiphash() before dispatching). */
#define HSIPHASH_ALIGNMENT __alignof__(unsigned long)
/* hsiphash key: two machine words (per the header comment, the backing
 * PRF is SipHash1-3 or HalfSipHash1-3 depending on word size). */
typedef struct {
	unsigned long key[2];
} hsiphash_key_t;
96 
/* Out-of-line hsiphash over an arbitrary buffer; callers normally use
 * hsiphash() below, which picks the aligned or unaligned variant. */
u32 __hsiphash_aligned(const void *data, size_t len,
		       const hsiphash_key_t *key);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key);
#endif
103 
/* Fast-path entry points for fixed-size inputs of one to four u32 words;
 * used by ___hsiphash_aligned() for constant lengths. */
u32 hsiphash_1u32(const u32 a, const hsiphash_key_t *key);
u32 hsiphash_2u32(const u32 a, const u32 b, const hsiphash_key_t *key);
u32 hsiphash_3u32(const u32 a, const u32 b, const u32 c,
		  const hsiphash_key_t *key);
u32 hsiphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
		  const hsiphash_key_t *key);
110 
___hsiphash_aligned(const __le32 * data,size_t len,const hsiphash_key_t * key)111 static inline u32 ___hsiphash_aligned(const __le32 *data, size_t len,
112 				      const hsiphash_key_t *key)
113 {
114 	if (__builtin_constant_p(len) && len == 4)
115 		return hsiphash_1u32(le32_to_cpu(data[0]), key);
116 	if (__builtin_constant_p(len) && len == 8)
117 		return hsiphash_2u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
118 				     key);
119 	if (__builtin_constant_p(len) && len == 12)
120 		return hsiphash_3u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
121 				     le32_to_cpu(data[2]), key);
122 	if (__builtin_constant_p(len) && len == 16)
123 		return hsiphash_4u32(le32_to_cpu(data[0]), le32_to_cpu(data[1]),
124 				     le32_to_cpu(data[2]), le32_to_cpu(data[3]),
125 				     key);
126 	return __hsiphash_aligned(data, len, key);
127 }
128 
/**
 * hsiphash - compute 32-bit hsiphash PRF value
 * @data: buffer to hash
 * @len: size of @data in bytes
 * @key: the hsiphash key
 */
hsiphash(const void * data,size_t len,const hsiphash_key_t * key)135 static inline u32 hsiphash(const void *data, size_t len,
136 			   const hsiphash_key_t *key)
137 {
138 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
139 	if (!IS_ALIGNED((unsigned long)data, HSIPHASH_ALIGNMENT))
140 		return __hsiphash_unaligned(data, len, key);
141 #endif
142 	return ___hsiphash_aligned(data, len, key);
143 }
144 
145 #endif /* _LINUX_SIPHASH_H */
146