#if defined(HAVE_SHA512) || defined(HAVE_SHA384)
#if defined(HAVE_SHA384)
const cx_hash_info_t cx_sha384_info = {
    CX_SHA384,
    /* ... digest and block sizes elided in this extract ... */
    (cx_err_t (*)(cx_hash_t *ctx)) cx_sha384_init_no_throw,
    (cx_err_t (*)(cx_hash_t *ctx, const uint8_t *data, size_t len)) cx_sha512_update,
    (cx_err_t (*)(cx_hash_t *ctx, uint8_t *digest)) cx_sha512_final,
    /* ... */
};
#endif  // HAVE_SHA384
#if defined(HAVE_SHA512)
const cx_hash_info_t cx_sha512_info = {
    CX_SHA512,
    /* ... digest and block sizes elided in this extract ... */
    (cx_err_t (*)(cx_hash_t *ctx)) cx_sha512_init_no_throw,
    (cx_err_t (*)(cx_hash_t *ctx, const uint8_t *data, size_t len)) cx_sha512_update,
    (cx_err_t (*)(cx_hash_t *ctx, uint8_t *digest)) cx_sha512_final,
    /* ... */
};
#endif  // HAVE_SHA512
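/* Note: both tables point at the same cx_sha512_update/cx_sha512_final pair.
 * SHA-384 differs from SHA-512 only in its initial hash values and in how
 * many digest bytes are copied out, so the compression code is shared. */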
#ifndef HAVE_SHA512_WITH_INIT_ALT_METHOD
#if defined(HAVE_SHA384)
static const uint64bits_t hzero_384[] = {_64BITS(0xcbbb9d5d, 0xc1059ed8),
                                         _64BITS(0x629a292a, 0x367cd507),
                                         _64BITS(0x9159015a, 0x3070dd17),
                                         _64BITS(0x152fecd8, 0xf70e5939),
                                         _64BITS(0x67332667, 0xffc00b31),
                                         _64BITS(0x8eb44a87, 0x68581511),
                                         _64BITS(0xdb0c2e0d, 0x64f98fa7),
                                         _64BITS(0x47b5481d, 0xbefa4fa4)};
#endif
#if defined(HAVE_SHA512)
static const uint64bits_t hzero[] = {_64BITS(0x6a09e667, 0xf3bcc908),
                                     _64BITS(0xbb67ae85, 0x84caa73b),
                                     _64BITS(0x3c6ef372, 0xfe94f82b),
                                     _64BITS(0xa54ff53a, 0x5f1d36f1),
                                     _64BITS(0x510e527f, 0xade682d1),
                                     _64BITS(0x9b05688c, 0x2b3e6c1f),
                                     _64BITS(0x1f83d9ab, 0xfb41bd6b),
                                     _64BITS(0x5be0cd19, 0x137e2179)};
#endif
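/* The tables above are the standard FIPS 180-4 initial hash values H0..H7:
 * the first 64 bits of the fractional parts of the square roots of the
 * 9th-16th primes (SHA-384) and of the first eight primes (SHA-512). */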
#if defined(HAVE_SHA384)
cx_err_t cx_sha384_init_no_throw(cx_sha512_t *hash)
{
    memset(hash, 0, sizeof(cx_sha512_t));
    hash->header.info = &cx_sha384_info;
    memmove(hash->acc, hzero_384, sizeof(hzero_384));
    return CX_OK;
}
#endif
#if defined(HAVE_SHA512)
cx_err_t cx_sha512_init_no_throw(cx_sha512_t *hash)
{
    memset(hash, 0, sizeof(cx_sha512_t));
    hash->header.info = &cx_sha512_info;
    memmove(hash->acc, hzero, sizeof(hzero));
    return CX_OK;
}
#endif

#endif  // HAVE_SHA512_WITH_INIT_ALT_METHOD
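/* Hedged usage sketch (not part of the original file): streaming SHA-384 with
 * the functions above. CX_SHA384_SIZE (48) is assumed from the CX headers;
 * error handling is abbreviated. */
static cx_err_t example_sha384_digest(const uint8_t *msg, size_t msg_len,
                                      uint8_t digest[CX_SHA384_SIZE])
{
    cx_sha512_t ctx;  // SHA-384 reuses the SHA-512 context and compression
    cx_err_t    err = cx_sha384_init_no_throw(&ctx);
    if (err == CX_OK) {
        err = cx_sha512_update(&ctx, msg, msg_len);
    }
    if (err == CX_OK) {
        err = cx_sha512_final(&ctx, digest);
    }
    explicit_bzero(&ctx, sizeof(ctx));  // wipe state, as cx_hash_sha512() does below
    return err;
}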
#ifndef HAVE_SHA512_WITH_BLOCK_ALT_METHOD
#ifndef NATIVE_64BITS  // 64-bit words emulated as two 32-bit halves

#define rotR64(x, n) cx_rotr64(&x, n)
#define shR64(x, n)  cx_shr64(&x, n)
/* ... tail of the emulated sig512() helper: XOR the rotated/shifted copies
   (x1, x2, x3) of the input back into *x ... */
    x->l = x1.l ^ x2.l ^ x3.l;
    x->h = x1.h ^ x2.h ^ x3.h;
/* ... tail of the emulated sum512() helper, the same XOR combine over three
   rotated copies ... */
    x->l = x1.l ^ x2.l ^ x3.l;
    x->h = x1.h ^ x2.h ^ x3.h;
#define sigma0(x) sig512(&x, 1, 8, 7)
#define sigma1(x) sig512(&x, 19, 61, 6)
#define sum0(x)   sum512(&x, 28, 34, 39)
#define sum1(x)   sum512(&x, 14, 18, 41)
// ch(x, y, z) = (x & y) ^ (~x & z), computed on the two 32-bit halves
#define ch(r, x, y, z)                                             \
    r.l = ((x.l) & (y.l)) ^ (~(x.l) & (z.l));                      \
    r.h = ((x.h) & (y.h)) ^ (~(x.h) & (z.h))

// maj(x, y, z) = (x & y) ^ (x & z) ^ (y & z), likewise per half
#define maj(r, x, y, z)                                            \
    r.l = ((x.l) & (y.l)) ^ ((x.l) & (z.l)) ^ ((y.l) & (z.l));     \
    r.h = ((x.h) & (y.h)) ^ ((x.h) & (z.h)) ^ ((y.h) & (z.h))
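/* Hedged sketch (added, not the SDK's cx_rotr64): what a 64-bit right-rotate
 * looks like when the word lives in two 32-bit halves, as in the emulated
 * branch above. Reduces 0 < n < 64 to a sub-32 rotate across both halves. */
static void example_rotr64(uint64bits_t *x, unsigned int n)
{
    if (n >= 32) {  // rotating by 32 simply swaps the halves
        uint32_t t = x->l;
        x->l = x->h;
        x->h = t;
        n -= 32;
    }
    if (n) {  // rotate the remaining 0 < n < 32 bits across both halves
        uint32_t l = x->l, h = x->h;
        x->l = (l >> n) | (h << (32 - n));
        x->h = (h >> n) | (l << (32 - n));
    }
}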
#else  // NATIVE_64BITS

#define rotR64(x, n) cx_rotr64(x, n)
#define shR64(x, n)  cx_shr64(x, n)

#define sig512(x, a, b, c) (rotR64((x), a) ^ rotR64((x), b) ^ shR64((x), c))
#define sum512(x, a, b, c) (rotR64((x), a) ^ rotR64((x), b) ^ rotR64((x), c))

#define sigma0(x) sig512(x, 1, 8, 7)
#define sigma1(x) sig512(x, 19, 61, 6)
#define sum0(x)   sum512(x, 28, 34, 39)
#define sum1(x)   sum512(x, 14, 18, 41)

#define ch(x, y, z)  (((x) & (y)) ^ (~(x) & (z)))
#define maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))

#endif  // NATIVE_64BITS
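/* In FIPS 180-4 notation these are the SHA-512 message-schedule functions
 *   sigma0(x) = ROTR^1(x)  ^ ROTR^8(x)  ^ SHR^7(x)
 *   sigma1(x) = ROTR^19(x) ^ ROTR^61(x) ^ SHR^6(x)
 * and the round functions
 *   sum0(x)   = ROTR^28(x) ^ ROTR^34(x) ^ ROTR^39(x)
 *   sum1(x)   = ROTR^14(x) ^ ROTR^18(x) ^ ROTR^41(x). */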
static const uint64bits_t primeSqrt[] = {
    /* ... the first 78 of the 80 SHA-512 round constants elided ... */
    _64BITS(0x5fcb6fab, 0x3ad6faec),
    _64BITS(0x6c44198c, 0x4a475817)};
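/* Round constants K0..K79: the first 64 bits of the fractional parts of the
 * cube roots of the first eighty primes (FIPS 180-4), e.g. the 80th prime is
 * 409 and K79 = floor(2^64 * frac(409^(1/3))) = 0x6c44198c4a475817. */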
void cx_sha512_block(cx_sha512_t *hash)
{
    /* ... locals: working variables A..H, temporaries t1/t2, the 16-word
       message schedule X[], and the chaining accumulator (elided) ... */
#ifdef ARCH_LITTLE_ENDIAN
    /* ... byte-swap the 128-byte input block to big-endian word order
       before scheduling (elided) ... */
#endif
    for (j = 0; j < 80; j++) {
#ifndef NATIVE_64BITS
        /* ... emulated message-schedule update, the sigma1/sigma0 terms
           accumulated with ADD64 (partially elided): ... */
        ADD64(t2, X[(j - 7) & 0xF]);
        /* ... */
        ADD64(t2, X[(j - 16) & 0xF]);
        /* ... t1 = H + sum1(E) + ch(E, F, G) + primeSqrt[j], then: ... */
        ADD64(t1, X[j & 0xF]);
        /* ... rotate the working-variable array one slot (A..H live in ACC[]
           in the emulated branch); the ACC[0]/ACC[4] updates from t1/t2
           follow (elided) ... */
        memmove(&ACC[1], &ACC[0], sizeof(ACC) - sizeof(uint64bits_t));
#else  // NATIVE_64BITS
        xj2  = X[(j - 2) & 0xF];
        xj15 = X[(j - 15) & 0xF];
        X[j & 0xF] = (sigma1(xj2) + X[(j - 7) & 0xF] + sigma0(xj15) + X[(j - 16) & 0xF]);

        t1 = H + sum1(E) + ch(E, F, G) + primeSqrt[j] + X[j & 0xF];
        t2 = sum0(A) + maj(A, B, C);
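        /* The standard SHA-512 round then rotates the working variables
         * (elided here): H=G; G=F; F=E; E=D+t1; D=C; C=B; B=A; A=t1+t2. */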
#endif  // NATIVE_64BITS
    }

    /* Feed-forward: add the final working variables back into the chaining
       accumulator. */
#ifndef NATIVE_64BITS
    ADD64(accumulator[0], A);
    ADD64(accumulator[1], B);
    ADD64(accumulator[2], C);
    ADD64(accumulator[3], D);
    ADD64(accumulator[4], E);
    ADD64(accumulator[5], F);
    ADD64(accumulator[6], G);
    ADD64(accumulator[7], H);
#else
    /* ... native variant: plain 64-bit additions (elided) ... */
#endif
}
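/* Note (added): the feed-forward above is what makes the compression function
 * one-way (the Davies-Meyer construction); without it, the 80-round update
 * would be an invertible permutation of the state. */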
#else  // HAVE_SHA512_WITH_BLOCK_ALT_METHOD
void cx_sha512_block(cx_sha512_t *ctx);
#endif  // HAVE_SHA512_WITH_BLOCK_ALT_METHOD
cx_err_t cx_sha512_update(cx_sha512_t *ctx, const uint8_t *data, size_t len)
{
    /* ... locals: r, block_size = 128, blen = ctx->blen,
       block = ctx->block (elided) ... */
    if (ctx == NULL) {  /* guard reconstructed; the exact check is elided */
        return CX_INVALID_PARAMETER;
    }
    if (data == NULL && len != 0) {  /* likewise reconstructed */
        return CX_INVALID_PARAMETER;
    }

    if ((blen + len) >= block_size) {
        r = block_size - blen;
        do {
            if (ctx->header.counter == CX_HASH_MAX_BLOCK_COUNT) {
                return CX_INVALID_PARAMETER;
            }
            memcpy(block + blen, data, r);
            cx_sha512_block(ctx);
            /* ... blen = 0 ... */
            ctx->header.counter++;
            /* ... advance data and len by r; r = block_size ... */
        } while (len >= block_size);
    }

    /* Buffer the remaining len < block_size bytes for the next call. */
    memcpy(block + blen, data, len);
    /* ... record the new fill level in ctx->blen and return CX_OK ... */
}
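/* Note (added): cx_sha512_update() may be called any number of times with
 * arbitrary chunk sizes; only full 128-byte blocks are compressed, and the
 * tail is carried in the context until cx_sha512_final(). The counter check
 * above bounds the total input so the length bookkeeping cannot overflow. */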
cx_err_t cx_sha512_final(cx_sha512_t *ctx, uint8_t *digest)
{
    uint64_t bitlen;
    size_t   len;
    /* ... block = ctx->block; acc = accumulator bytes (elided) ... */
    unsigned int blen = ctx->blen;

    /* ... append the mandatory 0x80 padding byte and bump blen (elided) ... */

    /* Total message length in bits; blen already counts the 0x80 byte,
       hence the -1. */
    bitlen = (((uint64_t) ctx->header.counter) * 128UL + (uint64_t) blen - 1UL) * 8UL;
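    /* Worked example (added): for a 3-byte message hashed in one update,
       counter == 0 and blen == 4 after the 0x80 byte is appended, so
       bitlen = (0 * 128 + 4 - 1) * 8 = 24 bits, as expected. */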
    /* Not enough room left for the 16 length bytes? Compress once more. */
    if ((128 - blen) < 16) {
        memset(block + blen, 0, 128 - blen);
        cx_sha512_block(ctx);
        blen = 0;
    }

    /* Last block: zero-pad, then store the bit length big-endian at the end. */
    memset(block + blen, 0, 128 - blen);
#ifdef ARCH_LITTLE_ENDIAN
    /* ... byte-swap bitlen before storing (elided) ... */
#else
    (*(uint64_t *) (&block[128 - 8])) = bitlen;
#endif
    cx_sha512_block(ctx);

    /* Copy out 64 bytes for SHA-512, 48 for SHA-384. */
    len = (ctx->header.info->md_type == CX_SHA512) ? 512 >> 3 : 384 >> 3;
#ifdef ARCH_LITTLE_ENDIAN
    /* ... swap the accumulator words back to big-endian, presumably via
       cx_swap_buffer64 (elided) ... */
#endif
    memcpy(digest, acc, len);
    return CX_OK;
}
#if defined(HAVE_SHA512)
size_t cx_hash_sha512(const uint8_t *in, size_t in_len, uint8_t *out, size_t out_len)
{
    if (out_len < CX_SHA512_SIZE) {
        return 0;
    }
    cx_sha512_init_no_throw(&G_cx.sha512);
    if (cx_sha512_update(&G_cx.sha512, in, in_len) != CX_OK) {
        return 0;
    }
    cx_sha512_final(&G_cx.sha512, out);
    explicit_bzero(&G_cx.sha512, sizeof(cx_sha512_t));
    return CX_SHA512_SIZE;
}
#endif  // HAVE_SHA512

#endif  // HAVE_SHA512 || HAVE_SHA384
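/* Hedged usage sketch (not part of the original file): one-shot hashing with
 * the convenience wrapper above. The "abc" vector is FIPS 180-4's; the
 * zero-on-failure convention is reconstructed from the size_t return type. */
static void example_one_shot(void)
{
    const uint8_t msg[3] = {'a', 'b', 'c'};
    uint8_t       digest[CX_SHA512_SIZE];
    size_t        n = cx_hash_sha512(msg, sizeof(msg), digest, sizeof(digest));
    /* On success n == CX_SHA512_SIZE (64) and digest begins
       dd af 35 a1 93 61 7a ba ... (the FIPS 180-4 "abc" test vector). */
    (void) n;
    (void) digest;
}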
/* Support declarations referenced above (from the CX headers): */

/* 64-bit type, native or by-hand (two 32-bit halves), depending on target
   and/or compiler support: */
typedef struct {
    uint32_t l;  /* 32 least significant bits */
    uint32_t h;  /* 32 most significant bits */
} uint64bits_t;

void cx_swap_buffer64(uint64bits_t *v, int len);  /* swap byte order of an array of 64-bit values */
void cx_swap_uint64(uint64bits_t *v);             /* swap byte order of one 64-bit value */
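/* Hedged sketch (added, not the SDK's actual definition): the ADD64(r, v)
 * macro used in the emulated branch must perform a 64-bit addition over the
 * two 32-bit halves with carry propagation, for example: */
#define EXAMPLE_ADD64(r, v)                                   \
    do {                                                      \
        uint32_t _lo = (r).l + (v).l;                         \
        (r).h += (v).h + (_lo < (r).l ? 1 : 0); /* carry */   \
        (r).l = _lo;                                          \
    } while (0)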