
/* sha256.c - TinyCrypt SHA-256 crypto hash algorithm implementation */
/*
 * - Redistributions of source code must retain the above copyright notice,
 * - Redistributions in binary form must reproduce the above copyright
 * - Neither the name of Intel Corporation nor the names of its contributors
 *
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 */
/* tc_sha256_init(): load the eight 32-bit words of the SHA-256 initial hash value */
	s->iv[0] = 0x6a09e667;
	s->iv[1] = 0xbb67ae85;
	s->iv[2] = 0x3c6ef372;
	s->iv[3] = 0xa54ff53a;
	s->iv[4] = 0x510e527f;
	s->iv[5] = 0x9b05688c;
	s->iv[6] = 0x1f83d9ab;
	s->iv[7] = 0x5be0cd19;
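/*
 * Aside (not part of sha256.c): the eight words loaded above are the first
 * 32 bits of the fractional parts of the square roots of the first eight
 * primes, as specified in FIPS 180-4.  A minimal standalone sketch that
 * recomputes them for comparison (naming is mine; build with -lm):
 */
#include <inttypes.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	const double primes[8] = { 2, 3, 5, 7, 11, 13, 17, 19 };
	const uint32_t expected[8] = {
		0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
		0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19
	};

	for (int i = 0; i < 8; ++i) {
		double r = sqrt(primes[i]);
		/* first 32 bits of the fractional part of sqrt(prime) */
		uint32_t word = (uint32_t)((r - floor(r)) * 4294967296.0);

		printf("%08" PRIx32 " %s\n", word,
		       word == expected[i] ? "ok" : "MISMATCH");
	}
	return 0;
}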
/* tc_sha256_update(): buffer input bytes and compress each full 64-byte block */
	while (datalen-- > 0) {
		s->leftover[s->leftover_offset++] = *(data++);
		if (s->leftover_offset >= TC_SHA256_BLOCK_SIZE) {
			compress(s->iv, s->leftover);
			s->leftover_offset = 0;
			s->bits_hashed += (TC_SHA256_BLOCK_SIZE << 3);
		}
	}
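/*
 * Aside (not part of sha256.c): because tc_sha256_update() only buffers bytes
 * into s->leftover and calls compress() whenever a full 64-byte block has
 * accumulated, the resulting digest does not depend on how the input is split
 * across calls.  A sketch of that streaming use, assuming the usual TinyCrypt
 * headers and return codes (tinycrypt/sha256.h, tinycrypt/constants.h); the
 * helper name hash_in_chunks() is mine, and chunk must be non-zero:
 */
#include <stddef.h>
#include <stdint.h>
#include <tinycrypt/sha256.h>
#include <tinycrypt/constants.h>

static void hash_in_chunks(uint8_t *digest, const uint8_t *msg,
			   size_t len, size_t chunk)
{
	struct tc_sha256_state_struct s;

	(void)tc_sha256_init(&s);
	while (len > 0) {
		size_t n = (len < chunk) ? len : chunk;

		(void)tc_sha256_update(&s, msg, n);
		msg += n;
		len -= n;
	}
	(void)tc_sha256_final(digest, &s);
}

/*
 * hash_in_chunks(d, msg, len, 1) and hash_in_chunks(d, msg, len, len) should
 * produce identical digests for the same message.
 */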
/* tc_sha256_final(): pad the last block, append the bit length, and emit the digest */
	s->bits_hashed += (s->leftover_offset << 3);

	s->leftover[s->leftover_offset++] = 0x80; /* always room for one byte */
	if (s->leftover_offset > (sizeof(s->leftover) - 8)) {
		/* no room left for the 8-byte length; pad out and compress this block */
		_set(s->leftover + s->leftover_offset, 0x00,
		     sizeof(s->leftover) - s->leftover_offset);
		compress(s->iv, s->leftover);
		s->leftover_offset = 0;
	}

	/* add the padding and the length in big-endian format */
	_set(s->leftover + s->leftover_offset, 0x00,
	     sizeof(s->leftover) - 8 - s->leftover_offset);
	s->leftover[sizeof(s->leftover) - 1] = (uint8_t)(s->bits_hashed);
	s->leftover[sizeof(s->leftover) - 2] = (uint8_t)(s->bits_hashed >> 8);
	s->leftover[sizeof(s->leftover) - 3] = (uint8_t)(s->bits_hashed >> 16);
	s->leftover[sizeof(s->leftover) - 4] = (uint8_t)(s->bits_hashed >> 24);
	s->leftover[sizeof(s->leftover) - 5] = (uint8_t)(s->bits_hashed >> 32);
	s->leftover[sizeof(s->leftover) - 6] = (uint8_t)(s->bits_hashed >> 40);
	s->leftover[sizeof(s->leftover) - 7] = (uint8_t)(s->bits_hashed >> 48);
	s->leftover[sizeof(s->leftover) - 8] = (uint8_t)(s->bits_hashed >> 56);

	/* hash the padding and length */
	compress(s->iv, s->leftover);

	/* copy the state out as the big-endian digest */
	for (i = 0; i < TC_SHA256_STATE_BLOCKS; ++i) {
		unsigned int t = *((unsigned int *) &s->iv[i]);

		digest[0] = (uint8_t)(t >> 24);
		digest[1] = (uint8_t)(t >> 16);
		digest[2] = (uint8_t)(t >> 8);
		digest[3] = (uint8_t)(t);
		digest += 4;
	}
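/*
 * Aside (not part of sha256.c): end-to-end use of the three calls excerpted
 * above, checked against the well-known FIPS 180-4 "abc" test vector
 * (ba7816bf 8f01cfea 414140de 5dae2223 b00361a3 96177a9c b410ff61 f20015ad).
 * A sketch assuming the usual TinyCrypt headers (tinycrypt/sha256.h,
 * tinycrypt/constants.h):
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <tinycrypt/sha256.h>
#include <tinycrypt/constants.h>

int main(void)
{
	static const uint8_t msg[] = "abc";
	uint8_t digest[TC_SHA256_DIGEST_SIZE];
	struct tc_sha256_state_struct s;

	if (tc_sha256_init(&s) != TC_CRYPTO_SUCCESS ||
	    tc_sha256_update(&s, msg, sizeof(msg) - 1) != TC_CRYPTO_SUCCESS ||
	    tc_sha256_final(digest, &s) != TC_CRYPTO_SUCCESS) {
		return 1;
	}

	for (size_t i = 0; i < sizeof(digest); ++i) {
		printf("%02x", digest[i]);
	}
	printf("\n");
	return 0;
}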
/*
 * Initializing SHA-256 Hash constant words K: the first 32 bits of the
 * fractional parts of the cube roots of the first 64 primes (2 through 311).
 */
static inline unsigned int ROTR(unsigned int a, unsigned int n)
{
	return (((a) >> n) | ((a) << (32 - n)));
}
#define Sigma0(a) (ROTR((a), 2) ^ ROTR((a), 13) ^ ROTR((a), 22))
#define Sigma1(a) (ROTR((a), 6) ^ ROTR((a), 11) ^ ROTR((a), 25))
#define sigma0(a) (ROTR((a), 7) ^ ROTR((a), 18) ^ ((a) >> 3))
#define sigma1(a) (ROTR((a), 17) ^ ROTR((a), 19) ^ ((a) >> 10))

#define Ch(a, b, c) (((a) & (b)) ^ ((~(a)) & (c)))
#define Maj(a, b, c) (((a) & (b)) ^ ((a) & (c)) ^ ((b) & (c)))
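/*
 * Aside (not part of sha256.c): sigma0/sigma1 above are the FIPS 180-4
 * message-schedule functions.  A self-contained sketch of how a 64-entry
 * schedule is expanded from one 512-bit block using the same recurrence;
 * the local names (ROTR32, SSIG0, SSIG1, expand_schedule) are mine,
 * duplicated here so the sketch compiles on its own:
 */
#include <stdint.h>

#define ROTR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))
#define SSIG0(x) (ROTR32((x), 7) ^ ROTR32((x), 18) ^ ((x) >> 3))
#define SSIG1(x) (ROTR32((x), 17) ^ ROTR32((x), 19) ^ ((x) >> 10))

static void expand_schedule(uint32_t w[64], const uint8_t block[64])
{
	/* w[0..15]: the block itself, read as big-endian 32-bit words */
	for (int t = 0; t < 16; ++t) {
		w[t] = ((uint32_t)block[4 * t] << 24) |
		       ((uint32_t)block[4 * t + 1] << 16) |
		       ((uint32_t)block[4 * t + 2] << 8) |
		       ((uint32_t)block[4 * t + 3]);
	}
	/* w[16..63]: W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16] */
	for (int t = 16; t < 64; ++t) {
		w[t] = SSIG1(w[t - 2]) + w[t - 7] +
		       SSIG0(w[t - 15]) + w[t - 16];
	}
}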
/* compress(): one application of the SHA-256 compression function */
	unsigned int a, b, c, d, e, f, g, h;

	a = iv[0]; b = iv[1]; c = iv[2]; d = iv[3];

	/* per-round update of the working variables (rounds 0..15) */
		t2 = Sigma0(a) + Maj(a, b, c);
		d = c; c = b; b = a; a = t1 + t2;

	/* per-round update of the working variables (rounds 16..63) */
		t2 = Sigma0(a) + Maj(a, b, c);
		d = c; c = b; b = a; a = t1 + t2;

	/* feed the working variables back into the chaining state */
	iv[0] += a; iv[1] += b; iv[2] += c; iv[3] += d;
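/*
 * Aside (not part of sha256.c): the complete per-round computation that the
 * t1/t2 and rotation lines above belong to, written out as in FIPS 180-4.
 * The struct and macro names below are mine; ki is the round constant K[i]
 * and wi the message-schedule word W[i] for round i:
 */
#include <stdint.h>

#define ROTR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))
#define BSIG0(x) (ROTR32((x), 2) ^ ROTR32((x), 13) ^ ROTR32((x), 22))
#define BSIG1(x) (ROTR32((x), 6) ^ ROTR32((x), 11) ^ ROTR32((x), 25))
#define CH(x, y, z)  (((x) & (y)) ^ (~(x) & (z)))
#define MAJ(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))

struct sha256_round_state {
	uint32_t a, b, c, d, e, f, g, h;
};

static void sha256_round(struct sha256_round_state *s, uint32_t ki, uint32_t wi)
{
	uint32_t t1 = s->h + BSIG1(s->e) + CH(s->e, s->f, s->g) + ki + wi;
	uint32_t t2 = BSIG0(s->a) + MAJ(s->a, s->b, s->c);

	s->h = s->g; s->g = s->f; s->f = s->e; s->e = s->d + t1;
	s->d = s->c; s->c = s->b; s->b = s->a; s->a = t1 + t2;
}

/*
 * After 64 such rounds, a..h are added back into the eight chaining words,
 * which is the iv[0] += a; ... feedforward shown above.
 */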