Lines matching +full:1 +full:- +full:v0 in the Linux kernel's lib/siphash.c
 * SipHash: a fast short-input PRF
 *
 * This implementation is specifically for SipHash2-4 for a secure PRF
 * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
 * hashtables.
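/* Hedged usage sketch for the distinction drawn above: keying a hashtable
 * with the secure variant via the kernel's siphash() helper from
 * <linux/siphash.h>. example_hash_init() and example_bucket() are
 * hypothetical names for illustration, not kernel API. */
#include <linux/siphash.h>
#include <linux/random.h>

static siphash_key_t example_key;

static void example_hash_init(void)
{
	/* A per-boot random key is what makes the PRF unpredictable. */
	get_random_bytes(&example_key, sizeof(example_key));
}

static u32 example_bucket(const void *item, size_t len, u32 nbuckets)
{
	/* siphash() returns a keyed 64-bit PRF value; fold it to an index. */
	return (u32)siphash(item, len, &example_key) % nbuckets;
}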
#include <asm/word-at-a-time.h>
	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32); \
	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0; \
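/* For orientation: the two macro lines above are half of the SIPROUND ARX
 * permutation. The full round, as specified in the SipHash paper (the kernel
 * wraps the same four lines in its SIPROUND macro), is sketched here: */
	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32);
	v2 += v3; v3 = rol64(v3, 16); v3 ^= v2;
	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0;
	v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32);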
	u64 v0 = 0x736f6d6570736575ULL; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];
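/* Sketch of the full state setup these PREAMBLE fragments belong to: the four
 * "somepseudorandomlygeneratedbytes" constants from the SipHash paper, each
 * XORed with one half of the 128-bit key; b carries the input length in its
 * top byte, per SipHash's padding rule. */
	u64 v0 = 0x736f6d6570736575ULL;	/* "somepseu" */
	u64 v1 = 0x646f72616e646f6dULL;	/* "dorandom" */
	u64 v2 = 0x6c7967656e657261ULL;	/* "lygenera" */
	u64 v3 = 0x7465646279746573ULL;	/* "tedbytes" */
	u64 b = (u64)len << 56;
	v3 ^= key->key[1];
	v2 ^= key->key[0];
	v1 ^= key->key[1];
	v0 ^= key->key[0];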
	v0 ^= b; \
	return (v0 ^ v1) ^ (v2 ^ v3);
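/* The two fragments above come from SipHash-2-4's finalization (POSTAMBLE).
 * A sketch of the whole sequence, per the paper: absorb the final block with
 * two compression rounds, flip v2's low byte, run four finalization rounds,
 * then XOR the state down to 64 bits. */
	v3 ^= b;
	SIPROUND;
	SIPROUND;
	v0 ^= b;
	v2 ^= 0xff;
	SIPROUND;
	SIPROUND;
	SIPROUND;
	SIPROUND;
	return (v0 ^ v1) ^ (v2 ^ v3);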
/* in __siphash_aligned(): */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	v0 ^= m;
	case 1: b |= end[0];
/* in __siphash_unaligned(): */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	v0 ^= m;
	case 1: b |= end[0];
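/* Hedged, self-contained sketch of what the "case 1:" fragments above are
 * doing in both bulk hashers: the 0-7 leftover bytes are ORed into the low
 * bits of b, little-endian, beneath the length byte already sitting in b's
 * top byte. fold_tail() is an illustrative name, not the kernel's code. */
#include <stdint.h>
#include <stddef.h>

static uint64_t fold_tail(uint64_t b, const uint8_t *end, size_t left)
{
	switch (left) {
	case 7: b |= (uint64_t)end[6] << 48; /* fall through */
	case 6: b |= (uint64_t)end[5] << 40; /* fall through */
	case 5: b |= (uint64_t)end[4] << 32; /* fall through */
	case 4: b |= (uint64_t)end[3] << 24; /* fall through */
	case 3: b |= (uint64_t)end[2] << 16; /* fall through */
	case 2: b |= (uint64_t)end[1] << 8;  /* fall through */
	case 1: b |= (uint64_t)end[0];
	}
	return b;
}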
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
	v0 ^= first;			/* in siphash_1u64() */
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
/* in siphash_2u64(): */
	v0 ^= first;
	v0 ^= second;
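/* Sketch of the fixed-length helper pattern these fragments trace, assuming
 * the PREAMBLE/SIPROUND/POSTAMBLE macros above: each u64 word m is absorbed
 * as v3 ^= m, two compression rounds (the "2" in SipHash2-4), then v0 ^= m. */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}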
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
/* in siphash_3u64(): */
	v0 ^= first;
	v0 ^= second;
	v0 ^= third;
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
/* in siphash_4u64(): */
	v0 ^= first;
	v0 ^= second;
	v0 ^= third;
	v0 ^= forth;
	v0 ^= combined;			/* in siphash_3u32() */
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */
	v0 ^= b; \
	return (v0 ^ v1) ^ (v2 ^ v3);
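/* The two fragments above end the 64-bit HPOSTAMBLE. Sketching the whole
 * finalization shows where the "1-3" in SipHash1-3 comes from: one
 * compression round per word and three finalization rounds, versus the two
 * and four used by SipHash2-4 earlier in the file. */
	v3 ^= b;
	HSIPROUND;
	v0 ^= b;
	v2 ^= 0xff;
	HSIPROUND;
	HSIPROUND;
	HSIPROUND;
	return (v0 ^ v1) ^ (v2 ^ v3);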
/* in __hsiphash_aligned(): */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	v0 ^= m;
	case 1: b |= end[0];
/* in __hsiphash_unaligned(): */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	v0 ^= m;
	case 1: b |= end[0];
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
	v0 ^= combined;			/* in hsiphash_2u32() */
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
	v0 ^= combined;			/* in hsiphash_3u32() */
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
/* in hsiphash_4u32(): */
	v0 ^= combined;
	v0 ^= combined;
	v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16); \
	v0 += v3; v3 = rol32(v3, 7); v3 ^= v0; \
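/* The two macro lines above are half of HalfSipHash's 32-bit round. The full
 * permutation, per the HalfSipHash reference (rotation constants 5, 16, 8,
 * 7, 13, 16 in place of the 64-bit set), is sketched here: */
	v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16);
	v2 += v3; v3 = rol32(v3, 8); v3 ^= v2;
	v0 += v3; v3 = rol32(v3, 7); v3 ^= v0;
	v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16);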
	u32 v0 = 0; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];
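/* Sketch of the complete 32-bit HPREAMBLE these fragments come from:
 * HalfSipHash seeds v0/v1 with zero and v2/v3 with truncated SipHash
 * constants before XORing in the 64-bit key; b again carries the input
 * length in its top byte. */
	u32 v0 = 0;
	u32 v1 = 0;
	u32 v2 = 0x6c796765U;	/* "lyge" */
	u32 v3 = 0x74656462U;	/* "tedb" */
	u32 b = (u32)len << 24;
	v3 ^= key->key[1];
	v2 ^= key->key[0];
	v1 ^= key->key[1];
	v0 ^= key->key[0];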
	v0 ^= b; \
/* in __hsiphash_aligned(): */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	v0 ^= m;
	case 1: b |= end[0];
/* in __hsiphash_unaligned(): */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	v0 ^= m;
	case 1: b |= end[0];
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
	v0 ^= first;			/* in hsiphash_1u32() */
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
/* in hsiphash_2u32(): */
	v0 ^= first;
	v0 ^= second;
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
/* in hsiphash_3u32(): */
	v0 ^= first;
	v0 ^= second;
	v0 ^= third;
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
/* in hsiphash_4u32(): */
	v0 ^= first;
	v0 ^= second;
	v0 ^= third;
	v0 ^= forth;
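/* Self-contained userspace sketch tying the 32-bit path together, mirroring
 * hsiphash_1u32(): HalfSipHash1-3 of a single u32. Illustrative, not verbatim
 * kernel code; hsiphash_1u32_sketch and HSIP_ROUND are hypothetical names,
 * and the v1 ^ v3 result follows the HalfSipHash reference's 32-bit output. */
#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

static uint32_t rol32(uint32_t v, int s) { return (v << s) | (v >> (32 - s)); }

#define HSIP_ROUND do { \
	v0 += v1; v1 = rol32(v1, 5);  v1 ^= v0; v0 = rol32(v0, 16); \
	v2 += v3; v3 = rol32(v3, 8);  v3 ^= v2; \
	v0 += v3; v3 = rol32(v3, 7);  v3 ^= v0; \
	v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16); \
} while (0)

static uint32_t hsiphash_1u32_sketch(uint32_t first, const uint32_t key[2])
{
	uint32_t v0 = 0          ^ key[0];
	uint32_t v1 = 0          ^ key[1];
	uint32_t v2 = 0x6c796765 ^ key[0];
	uint32_t v3 = 0x74656462 ^ key[1];
	uint32_t b = 4U << 24;	/* input length (4 bytes) in the top byte */

	v3 ^= first;
	HSIP_ROUND;		/* one compression round: the "1" in 1-3 */
	v0 ^= first;

	v3 ^= b;
	HSIP_ROUND;
	v0 ^= b;
	v2 ^= 0xff;
	HSIP_ROUND;		/* three finalization rounds: the "3" */
	HSIP_ROUND;
	HSIP_ROUND;
	return v1 ^ v3;
}

int main(void)
{
	const uint32_t key[2] = { 0x03020100, 0x07060504 };
	printf("hsiphash_1u32 sketch: %08" PRIx32 "\n",
	       hsiphash_1u32_sketch(0x0b0a0908, key));
	return 0;
}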