// SHA-1 / SHA-2 implementations: portable C++ plus x86/x32/x64 assembly paths.
// NOTE(review): this extraction is garbled — the original file's line numbers
// are fused into the text and some interior lines (braces, #else/#endif) are
// missing; code below is kept byte-identical, comments only are added.
// blk0 loads a message word directly; blk1 is the SHA-1 schedule recurrence
// W[t] = ROTL1(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16]) kept in a 16-word
// circular buffer (the &15 masks).
11 #if CRYPTOPP_MSC_VERSION 12 # pragma warning(disable: 4100 4731) 15 #ifndef CRYPTOPP_IMPORTS 16 #ifndef CRYPTOPP_GENERATE_X64_MASM 23 #if defined(CRYPTOPP_DISABLE_SHA_ASM) 24 # undef CRYPTOPP_X86_ASM_AVAILABLE 25 # undef CRYPTOPP_X32_ASM_AVAILABLE 26 # undef CRYPTOPP_X64_ASM_AVAILABLE 27 # undef CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE 34 #define blk0(i) (W[i] = data[i]) 35 #define blk1(i) (W[i&15] = rotlFixed(W[(i+13)&15]^W[(i+8)&15]^W[(i+2)&15]^W[i&15],1)) 37 void SHA1::InitState(HashWordType *state)
// SHA-1 initial hash value H(0) — FIPS 180-4, section 5.3.1.
39 state[0] = 0x67452301L;
40 state[1] = 0xEFCDAB89L;
41 state[2] = 0x98BADCFEL;
42 state[3] = 0x10325476L;
43 state[4] = 0xC3D2E1F0L;
// SHA-1 round functions (FIPS 180-4, section 4.1.1): f1 is Ch in the
// xor-reduced form z^(x&(y^z)), f2/f4 are Parity, f3 is Maj rewritten as
// (x&y)|(z&(x|y)).  R0 uses blk0 (message words consumed directly, rounds
// 0-15); R1..R4 use the blk1 schedule recurrence.  The four hex constants are
// the SHA-1 round constants K (section 4.2.1).  Each R macro also performs the
// w = ROTL30(w) working-variable update.
// NOTE(review): the Transform preamble (locals W[16], a..e loaded from state)
// is missing from this extraction.
46 #define f1(x,y,z) (z^(x&(y^z))) 47 #define f2(x,y,z) (x^y^z) 48 #define f3(x,y,z) ((x&y)|(z&(x|y))) 49 #define f4(x,y,z) (x^y^z) 52 #define R0(v,w,x,y,z,i) z+=f1(w,x,y)+blk0(i)+0x5A827999+rotlFixed(v,5);w=rotlFixed(w,30); 53 #define R1(v,w,x,y,z,i) z+=f1(w,x,y)+blk1(i)+0x5A827999+rotlFixed(v,5);w=rotlFixed(w,30); 54 #define R2(v,w,x,y,z,i) z+=f2(w,x,y)+blk1(i)+0x6ED9EBA1+rotlFixed(v,5);w=rotlFixed(w,30); 55 #define R3(v,w,x,y,z,i) z+=f3(w,x,y)+blk1(i)+0x8F1BBCDC+rotlFixed(v,5);w=rotlFixed(w,30); 56 #define R4(v,w,x,y,z,i) z+=f4(w,x,y)+blk1(i)+0xCA62C1D6+rotlFixed(v,5);w=rotlFixed(w,30); 58 void SHA1::Transform(word32 *state,
const word32 *data)
// All 80 rounds fully unrolled, 20 per round function.  The five working
// variables a..e rotate through the macro argument positions each round
// instead of being physically shifted.
68 R0(a,b,c,d,e, 0); R0(e,a,b,c,d, 1); R0(d,e,a,b,c, 2); R0(c,d,e,a,b, 3);
69 R0(b,c,d,e,a, 4); R0(a,b,c,d,e, 5); R0(e,a,b,c,d, 6); R0(d,e,a,b,c, 7);
70 R0(c,d,e,a,b, 8); R0(b,c,d,e,a, 9); R0(a,b,c,d,e,10); R0(e,a,b,c,d,11);
71 R0(d,e,a,b,c,12); R0(c,d,e,a,b,13); R0(b,c,d,e,a,14); R0(a,b,c,d,e,15);
72 R1(e,a,b,c,d,16); R1(d,e,a,b,c,17); R1(c,d,e,a,b,18); R1(b,c,d,e,a,19);
73 R2(a,b,c,d,e,20); R2(e,a,b,c,d,21); R2(d,e,a,b,c,22); R2(c,d,e,a,b,23);
74 R2(b,c,d,e,a,24); R2(a,b,c,d,e,25); R2(e,a,b,c,d,26); R2(d,e,a,b,c,27);
75 R2(c,d,e,a,b,28); R2(b,c,d,e,a,29); R2(a,b,c,d,e,30); R2(e,a,b,c,d,31);
76 R2(d,e,a,b,c,32); R2(c,d,e,a,b,33); R2(b,c,d,e,a,34); R2(a,b,c,d,e,35);
77 R2(e,a,b,c,d,36); R2(d,e,a,b,c,37); R2(c,d,e,a,b,38); R2(b,c,d,e,a,39);
78 R3(a,b,c,d,e,40); R3(e,a,b,c,d,41); R3(d,e,a,b,c,42); R3(c,d,e,a,b,43);
79 R3(b,c,d,e,a,44); R3(a,b,c,d,e,45); R3(e,a,b,c,d,46); R3(d,e,a,b,c,47);
80 R3(c,d,e,a,b,48); R3(b,c,d,e,a,49); R3(a,b,c,d,e,50); R3(e,a,b,c,d,51);
81 R3(d,e,a,b,c,52); R3(c,d,e,a,b,53); R3(b,c,d,e,a,54); R3(a,b,c,d,e,55);
82 R3(e,a,b,c,d,56); R3(d,e,a,b,c,57); R3(c,d,e,a,b,58); R3(b,c,d,e,a,59);
83 R4(a,b,c,d,e,60); R4(e,a,b,c,d,61); R4(d,e,a,b,c,62); R4(c,d,e,a,b,63);
84 R4(b,c,d,e,a,64); R4(a,b,c,d,e,65); R4(e,a,b,c,d,66); R4(d,e,a,b,c,67);
85 R4(c,d,e,a,b,68); R4(b,c,d,e,a,69); R4(a,b,c,d,e,70); R4(e,a,b,c,d,71);
86 R4(d,e,a,b,c,72); R4(c,d,e,a,b,73); R4(b,c,d,e,a,74); R4(a,b,c,d,e,75);
87 R4(e,a,b,c,d,76); R4(d,e,a,b,c,77); R4(c,d,e,a,b,78); R4(b,c,d,e,a,79);
100 void SHA224::InitState(HashWordType *state)
102 static const word32 s[8] = {0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4};
103 memcpy(state, s,
sizeof(s));
106 void SHA256::InitState(HashWordType *state)
108 static const word32 s[8] = {0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
109 memcpy(state, s,
sizeof(s));
// SHA-256 round constants K[0..63] (FIPS 180-4, section 4.2.2): the first 32
// bits of the fractional parts of the cube roots of the first 64 primes.
// When the SSE2 assembly path is compiled in, the table is declared with
// 16-byte alignment so aligned loads are safe.
112 #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE 113 CRYPTOPP_ALIGN_DATA(16) extern const word32 SHA256_K[64] CRYPTOPP_SECTION_ALIGN16 = {
115 extern const word32 SHA256_K[64] = {
117 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
118 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
119 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
120 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
121 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
122 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
123 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
124 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
125 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
126 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
127 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
128 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
129 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
130 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
131 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
132 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
// X86_SHA256_HashBlocks: hand-written x86/x32/x64 SHA-256 block-hashing core.
// Consumes len bytes of data in 64-byte blocks, updating state in place.
// H(i)/Wt(i) index circular buffers in the stack frame (BASE-relative);
// K_END/STATE_SAVE/DATA_SAVE/DATA_END are stack slots saved across the loop.
// NOTE(review): many interior lines of this routine are missing from the
// extraction (directives are split, macro bodies truncated); kept byte-identical.
135 #endif // #ifndef CRYPTOPP_GENERATE_X64_MASM 137 #if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_GENERATE_X64_MASM)) 139 static void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(word32 *state,
const word32 *data,
size_t len
140 #
if defined(_MSC_VER) && (_MSC_VER == 1200)
145 #if defined(_MSC_VER) && (_MSC_VER == 1200) 146 AS2(mov ecx, [state])
150 #define LOCALS_SIZE 8*4 + 16*4 + 4*WORD_SZ 151 #define H(i) [BASE+ASM_MOD(1024+7-(i),8)*4] 159 #define Wt(i) BASE+8*4+ASM_MOD(1024+15-(i),16)*4 160 #define Wt_2(i) Wt((i)-2) 161 #define Wt_15(i) Wt((i)-15) 162 #define Wt_7(i) Wt((i)-7) 163 #define K_END [BASE+8*4+16*4+0*WORD_SZ] 164 #define STATE_SAVE [BASE+8*4+16*4+1*WORD_SZ] 165 #define DATA_SAVE [BASE+8*4+16*4+2*WORD_SZ] 166 #define DATA_END [BASE+8*4+16*4+3*WORD_SZ] 167 #define Kt(i) WORD_REG(si)+(i)*4 168 #if CRYPTOPP_BOOL_X32 170 #elif CRYPTOPP_BOOL_X86 172 #elif defined(__GNUC__) 178 #define RA0(i, edx, edi) \ 179 AS2( add edx, [Kt(i)] )\ 180 AS2( add edx, [Wt(i)] )\ 181 AS2( add edx, H(i) )\ 183 #define RA1(i, edx, edi) 185 #define RB0(i, edx, edi) 187 #define RB1(i, edx, edi) \ 188 AS2( mov AS_REG_7d, [Wt_2(i)] )\ 189 AS2( mov edi, [Wt_15(i)])\ 190 AS2( mov ebx, AS_REG_7d )\ 191 AS2( shr AS_REG_7d, 10 )\ 193 AS2( xor AS_REG_7d, ebx )\ 195 AS2( xor ebx, AS_REG_7d )\ 196 AS2( add ebx, [Wt_7(i)])\ 197 AS2( mov AS_REG_7d, edi )\ 198 AS2( shr AS_REG_7d, 3 )\ 200 AS2( add ebx, [Wt(i)])\ 201 AS2( xor AS_REG_7d, edi )\ 202 AS2( add edx, [Kt(i)])\ 204 AS2( add edx, H(i) )\ 205 AS2( xor AS_REG_7d, edi )\ 206 AS2( add AS_REG_7d, ebx )\ 207 AS2( mov [Wt(i)], AS_REG_7d)\ 208 AS2( add edx, AS_REG_7d )\ 210 #define ROUND(i, r, eax, ecx, edi, edx)\ 213 AS2( mov edx, F(i) )\ 214 AS2( xor edx, G(i) )\ 216 AS2( xor edx, G(i) )\ 217 AS2( mov AS_REG_7d, edi )\ 219 AS2( ror AS_REG_7d, 25 )\ 221 AS2( xor AS_REG_7d, edi )\ 223 AS2( xor AS_REG_7d, edi )\ 224 AS2( add edx, AS_REG_7d )\ 229 AS2( xor ecx, B(i) )\ 231 AS2( xor eax, B(i) )\ 232 AS2( mov AS_REG_7d, ebx )\ 235 AS2( add edx, D(i) )\ 236 AS2( mov D(i), edx )\ 237 AS2( ror AS_REG_7d, 22 )\ 238 AS2( xor AS_REG_7d, ebx )\ 240 AS2( xor AS_REG_7d, ebx )\ 241 AS2( add eax, AS_REG_7d )\ 242 AS2( mov H(i), eax )\ 246 #if CRYPTOPP_BOOL_X64 247 #define SWAP_COPY(i) \ 248 AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\ 249 AS1( bswap WORD_REG(bx))\ 250 AS2( mov [Wt(i*2+1)], 
WORD_REG(bx)) 252 #define SWAP_COPY(i) \ 253 AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\ 254 AS1( bswap WORD_REG(bx))\ 255 AS2( mov [Wt(i)], WORD_REG(bx)) 258 #if defined(__GNUC__) 259 #if CRYPTOPP_BOOL_X64 264 #
if CRYPTOPP_BOOL_X64
268 #elif defined(CRYPTOPP_GENERATE_X64_MASM)
270 X86_SHA256_HashBlocks PROC FRAME
275 alloc_stack(LOCALS_SIZE+8)
278 lea rsi, [?SHA256_K@
CryptoPP@@3QBIB + 48*4]
281 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
284 AS2( lea WORD_REG(si), [SHA256_K+48*4])
286 #
if !defined(_MSC_VER) || (_MSC_VER < 1400)
293 AS2( sub WORD_REG(sp), LOCALS_SIZE)
296 AS2( mov STATE_SAVE, WORD_REG(cx))
297 AS2( mov DATA_SAVE, WORD_REG(dx))
298 AS2( lea WORD_REG(ax), [WORD_REG(di) + WORD_REG(dx)])
299 AS2( mov DATA_END, WORD_REG(ax))
300 AS2( mov K_END, WORD_REG(si))
302 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
303 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
306 AS1( dec DWORD PTR K_END)
308 AS2( movdqa xmm0, XMMWORD_PTR [WORD_REG(cx)+0*16])
309 AS2( movdqa xmm1, XMMWORD_PTR [WORD_REG(cx)+1*16])
312 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
313 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
327 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
329 AS2( movdqa E(0), xmm1)
330 AS2( movdqa A(0), xmm0)
332 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
335 AS2( sub WORD_REG(si), 48*4)
336 SWAP_COPY(0) SWAP_COPY(1) SWAP_COPY(2) SWAP_COPY(3)
337 SWAP_COPY(4) SWAP_COPY(5) SWAP_COPY(6) SWAP_COPY(7)
338 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
339 SWAP_COPY(8) SWAP_COPY(9) SWAP_COPY(10) SWAP_COPY(11)
340 SWAP_COPY(12) SWAP_COPY(13) SWAP_COPY(14) SWAP_COPY(15)
// 64 rounds, 16 per pass.  The first pass (r=0) consumes the byte-swapped
// message words already copied into Wt; later passes extend the message
// schedule in place (the RB1 sequence above).  The eax/ecx/edi/edx argument
// pairs alternate so working variables rotate without register moves.
347 ROUND(0, 0, eax, ecx, edi, edx)
348 ROUND(1, 0, ecx, eax, edx, edi)
349 ROUND(2, 0, eax, ecx, edi, edx)
350 ROUND(3, 0, ecx, eax, edx, edi)
351 ROUND(4, 0, eax, ecx, edi, edx)
352 ROUND(5, 0, ecx, eax, edx, edi)
353 ROUND(6, 0, eax, ecx, edi, edx)
354 ROUND(7, 0, ecx, eax, edx, edi)
355 ROUND(8, 0, eax, ecx, edi, edx)
356 ROUND(9, 0, ecx, eax, edx, edi)
357 ROUND(10, 0, eax, ecx, edi, edx)
358 ROUND(11, 0, ecx, eax, edx, edi)
359 ROUND(12, 0, eax, ecx, edi, edx)
360 ROUND(13, 0, ecx, eax, edx, edi)
361 ROUND(14, 0, eax, ecx, edi, edx)
362 ROUND(15, 0, ecx, eax, edx, edi)
// Advance the round-constant pointer by 16 words and run the next 16 rounds;
// the loop repeats until si reaches K_END (all 64 constants consumed).
365 AS2(add WORD_REG(si), 4*16)
366 ROUND(0, 1, eax, ecx, edi, edx)
367 ROUND(1, 1, ecx, eax, edx, edi)
368 ROUND(2, 1, eax, ecx, edi, edx)
369 ROUND(3, 1, ecx, eax, edx, edi)
370 ROUND(4, 1, eax, ecx, edi, edx)
371 ROUND(5, 1, ecx, eax, edx, edi)
372 ROUND(6, 1, eax, ecx, edi, edx)
373 ROUND(7, 1, ecx, eax, edx, edi)
374 ROUND(8, 1, eax, ecx, edi, edx)
375 ROUND(9, 1, ecx, eax, edx, edi)
376 ROUND(10, 1, eax, ecx, edi, edx)
377 ROUND(11, 1, ecx, eax, edx, edi)
378 ROUND(12, 1, eax, ecx, edi, edx)
379 ROUND(13, 1, ecx, eax, edx, edi)
380 ROUND(14, 1, eax, ecx, edi, edx)
381 ROUND(15, 1, ecx, eax, edx, edi)
382 AS2( cmp WORD_REG(si), K_END)
// Block finished: advance the data pointer by one 64-byte block and reload
// the saved state pointer before the feed-forward addition.
387 AS2( mov WORD_REG(dx), DATA_SAVE)
388 AS2( add WORD_REG(dx), 64)
389 AS2( mov AS_REG_7, STATE_SAVE)
390 AS2( mov DATA_SAVE, WORD_REG(dx))
392 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
393 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
394 AS2( test DWORD PTR K_END, 1)
397 AS2( movdqa xmm1, XMMWORD_PTR [AS_REG_7+1*16])
398 AS2( movdqa xmm0, XMMWORD_PTR [AS_REG_7+0*16])
399 AS2( paddq xmm1, E(0))
400 AS2( paddq xmm0, A(0))
401 AS2( movdqa [AS_REG_7+1*16], xmm1)
402 AS2( movdqa [AS_REG_7+0*16], xmm0)
403 AS2( cmp WORD_REG(dx), DATA_END)
409 #
if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
410 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
// Scalar (non-SSE2) path: fold the eight working variables back into the
// chaining state word by word, then loop while data remains.
414 AS2( add [AS_REG_7+0*4], ecx)
415 AS2( add [AS_REG_7+4*4], edi)
419 AS2( add [AS_REG_7+1*4], eax)
420 AS2( add [AS_REG_7+2*4], ebx)
421 AS2( add [AS_REG_7+3*4], ecx)
425 AS2( add [AS_REG_7+5*4], eax)
426 AS2( add [AS_REG_7+6*4], ebx)
427 AS2( add [AS_REG_7+7*4], ecx)
428 AS2( mov ecx, AS_REG_7d)
429 AS2( cmp WORD_REG(dx), DATA_END)
431 #
if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
438 #
if !defined(_MSC_VER) || (_MSC_VER < 1400)
442 #ifdef CRYPTOPP_GENERATE_X64_MASM
443 add rsp, LOCALS_SIZE+8
449 X86_SHA256_HashBlocks ENDP
455 :
"c" (state),
"d" (data),
"S" (SHA256_K+48),
"D" (len)
456 #
if CRYPTOPP_BOOL_X64
459 :
"memory",
"cc",
"%eax" 460 #
if CRYPTOPP_BOOL_X64
461 ,
"%rbx",
"%r8",
"%r10" 467 #endif // (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_GENERATE_X64_MASM)) 469 #ifndef CRYPTOPP_GENERATE_X64_MASM 471 #ifdef CRYPTOPP_X64_MASM_AVAILABLE 473 void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(word32 *state,
const word32 *data,
size_t len);
477 #if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_X64_MASM_AVAILABLE)) && !defined(CRYPTOPP_DISABLE_SHA_ASM) 479 size_t SHA256::HashMultipleBlocks(
const word32 *input,
size_t length)
481 X86_SHA256_HashBlocks(m_state, input, (length&(
size_t(0)-BLOCKSIZE)) - !
HasSSE2());
482 return length % BLOCKSIZE;
485 size_t SHA224::HashMultipleBlocks(
const word32 *input,
size_t length)
487 X86_SHA256_HashBlocks(m_state, input, (length&(
size_t(0)-BLOCKSIZE)) - !
HasSSE2());
488 return length % BLOCKSIZE;
// Portable SHA-256 building blocks: blk2 extends the message schedule in a
// 16-word circular buffer (W[t] += s1(W[t-2]) + W[t-7] + s0(W[t-15]));
// Ch and Maj are the FIPS 180-4 choice/majority functions in xor-reduced
// form; a(i)..h(i) index a circular 8-word working buffer T so each round
// needs no variable shuffling; R is one full round (j?blk2:blk0 picks
// schedule extension vs. direct message load); S0/S1/s0/s1 are the SHA-256
// Sigma/sigma functions (FIPS 180-4, section 4.1.2).
493 #define blk2(i) (W[i&15]+=s1(W[(i-2)&15])+W[(i-7)&15]+s0(W[(i-15)&15])) 495 #define Ch(x,y,z) (z^(x&(y^z))) 496 #define Maj(x,y,z) (y^((x^y)&(y^z))) 498 #define a(i) T[(0-i)&7] 499 #define b(i) T[(1-i)&7] 500 #define c(i) T[(2-i)&7] 501 #define d(i) T[(3-i)&7] 502 #define e(i) T[(4-i)&7] 503 #define f(i) T[(5-i)&7] 504 #define g(i) T[(6-i)&7] 505 #define h(i) T[(7-i)&7] 507 #define R(i) h(i)+=S1(e(i))+Ch(e(i),f(i),g(i))+SHA256_K[i+j]+(j?blk2(i):blk0(i));\ 508 d(i)+=h(i);h(i)+=S0(a(i))+Maj(a(i),b(i),c(i)) 511 #define S0(x) (rotrFixed(x,2)^rotrFixed(x,13)^rotrFixed(x,22)) 512 #define S1(x) (rotrFixed(x,6)^rotrFixed(x,11)^rotrFixed(x,25)) 513 #define s0(x) (rotrFixed(x,7)^rotrFixed(x,18)^(x>>3)) 514 #define s1(x) (rotrFixed(x,17)^rotrFixed(x,19)^(x>>10)) 517 #if defined(__OPTIMIZE_SIZE__) 518 void SHA256::Transform(word32 *state,
const word32 *data)
// Size-optimized transform variant (__OPTIMIZE_SIZE__ builds).
// NOTE(review): most interior lines of this function are missing from the
// extraction; the fragments below show the schedule update and the Ch/Maj
// round arithmetic operating on a sliding window pointer t.
522 unsigned int i = 0, j = 0;
525 memcpy(t, state, 8*4);
526 word32 e = t[4], a = t[0];
535 w += Ch(e, t[5], t[6]);
539 a = w + Maj(a, t[1], t[2]);
550 word32 w = s1(W[i+16-2]) + s0(W[i+16-15]) + W[i] + W[i+16-7];
555 w += Ch(e, t[5], t[6]);
559 a = w + Maj(a, t[1], t[2]);
562 w = s1(W[(i+1)+16-2]) + s0(W[(i+1)+16-15]) + W[(i+1)] + W[(i+1)+16-7];
563 W[(i+1)+16] = W[(i+1)] = w;
567 w += Ch(e, (t-1)[5], (t-1)[6]);
569 (t-1)[3] = (t-1)[3+8] = e;
571 a = w + Maj(a, (t-1)[1], (t-1)[2]);
572 (t-1)[-1] = (t-1)[7] = a;
// SHA256::Transform — compress one 64-byte block into state.
590 void SHA256::Transform(word32 *state,
const word32 *data)
// When x86/x32/x64 assembly is compiled in, delegate a single block to
// X86_SHA256_HashBlocks; the byte count BLOCKSIZE - !HasSSE2() carries the
// SSE2-availability flag in its low bit (see HashMultipleBlocks).
593 #if (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_X64_MASM_AVAILABLE)) && !defined(CRYPTOPP_DISABLE_SHA_ASM) 596 X86_SHA256_HashBlocks(state, W, BLOCKSIZE - !
HasSSE2());
// Portable path: copy the chaining state into the circular working buffer T.
600 memcpy(T, state,
sizeof(T));
// 64 rounds, 16 per j iteration; the a(i)..h(i) index macros rotate the
// working variables instead of moving them.
602 for (
unsigned int j=0; j<64; j+=16)
604 R( 0); R( 1); R( 2); R( 3);
605 R( 4); R( 5); R( 6); R( 7);
606 R( 8); R( 9); R(10); R(11);
607 R(12); R(13); R(14); R(15);
630 void SHA384::InitState(HashWordType *state)
632 static const word64 s[8] = {
633 W64LIT(0xcbbb9d5dc1059ed8), W64LIT(0x629a292a367cd507),
634 W64LIT(0x9159015a3070dd17), W64LIT(0x152fecd8f70e5939),
635 W64LIT(0x67332667ffc00b31), W64LIT(0x8eb44a8768581511),
636 W64LIT(0xdb0c2e0d64f98fa7), W64LIT(0x47b5481dbefa4fa4)};
637 memcpy(state, s,
sizeof(s));
640 void SHA512::InitState(HashWordType *state)
642 static const word64 s[8] = {
643 W64LIT(0x6a09e667f3bcc908), W64LIT(0xbb67ae8584caa73b),
644 W64LIT(0x3c6ef372fe94f82b), W64LIT(0xa54ff53a5f1d36f1),
645 W64LIT(0x510e527fade682d1), W64LIT(0x9b05688c2b3e6c1f),
646 W64LIT(0x1f83d9abfb41bd6b), W64LIT(0x5be0cd19137e2179)};
647 memcpy(state, s,
sizeof(s));
// SHA-512 round constants K[0..79] (FIPS 180-4, section 4.2.3): the first 64
// bits of the fractional parts of the cube roots of the first 80 primes.
// 16-byte aligned for the SSE2 transform's aligned 128-bit loads.
650 #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) 651 CRYPTOPP_ALIGN_DATA(16) static const word64 SHA512_K[80] CRYPTOPP_SECTION_ALIGN16 = {
653 CRYPTOPP_ALIGN_DATA(16) static const word64 SHA512_K[80] CRYPTOPP_SECTION_ALIGN16 = {
655 W64LIT(0x428a2f98d728ae22), W64LIT(0x7137449123ef65cd),
656 W64LIT(0xb5c0fbcfec4d3b2f), W64LIT(0xe9b5dba58189dbbc),
657 W64LIT(0x3956c25bf348b538), W64LIT(0x59f111f1b605d019),
658 W64LIT(0x923f82a4af194f9b), W64LIT(0xab1c5ed5da6d8118),
659 W64LIT(0xd807aa98a3030242), W64LIT(0x12835b0145706fbe),
660 W64LIT(0x243185be4ee4b28c), W64LIT(0x550c7dc3d5ffb4e2),
661 W64LIT(0x72be5d74f27b896f), W64LIT(0x80deb1fe3b1696b1),
662 W64LIT(0x9bdc06a725c71235), W64LIT(0xc19bf174cf692694),
663 W64LIT(0xe49b69c19ef14ad2), W64LIT(0xefbe4786384f25e3),
664 W64LIT(0x0fc19dc68b8cd5b5), W64LIT(0x240ca1cc77ac9c65),
665 W64LIT(0x2de92c6f592b0275), W64LIT(0x4a7484aa6ea6e483),
666 W64LIT(0x5cb0a9dcbd41fbd4), W64LIT(0x76f988da831153b5),
667 W64LIT(0x983e5152ee66dfab), W64LIT(0xa831c66d2db43210),
668 W64LIT(0xb00327c898fb213f), W64LIT(0xbf597fc7beef0ee4),
669 W64LIT(0xc6e00bf33da88fc2), W64LIT(0xd5a79147930aa725),
670 W64LIT(0x06ca6351e003826f), W64LIT(0x142929670a0e6e70),
671 W64LIT(0x27b70a8546d22ffc), W64LIT(0x2e1b21385c26c926),
672 W64LIT(0x4d2c6dfc5ac42aed), W64LIT(0x53380d139d95b3df),
673 W64LIT(0x650a73548baf63de), W64LIT(0x766a0abb3c77b2a8),
674 W64LIT(0x81c2c92e47edaee6), W64LIT(0x92722c851482353b),
675 W64LIT(0xa2bfe8a14cf10364), W64LIT(0xa81a664bbc423001),
676 W64LIT(0xc24b8b70d0f89791), W64LIT(0xc76c51a30654be30),
677 W64LIT(0xd192e819d6ef5218), W64LIT(0xd69906245565a910),
678 W64LIT(0xf40e35855771202a), W64LIT(0x106aa07032bbd1b8),
679 W64LIT(0x19a4c116b8d2d0c8), W64LIT(0x1e376c085141ab53),
680 W64LIT(0x2748774cdf8eeb99), W64LIT(0x34b0bcb5e19b48a8),
681 W64LIT(0x391c0cb3c5c95a63), W64LIT(0x4ed8aa4ae3418acb),
682 W64LIT(0x5b9cca4f7763e373), W64LIT(0x682e6ff3d6b2b8a3),
683 W64LIT(0x748f82ee5defb2fc), W64LIT(0x78a5636f43172f60),
684 W64LIT(0x84c87814a1f0ab72), W64LIT(0x8cc702081a6439ec),
685 W64LIT(0x90befffa23631e28), W64LIT(0xa4506cebde82bde9),
686 W64LIT(0xbef9a3f7b2c67915), W64LIT(0xc67178f2e372532b),
687 W64LIT(0xca273eceea26619c), W64LIT(0xd186b8c721c0c207),
688 W64LIT(0xeada7dd6cde0eb1e), W64LIT(0xf57d4f7fee6ed178),
689 W64LIT(0x06f067aa72176fba), W64LIT(0x0a637dc5a2c898a6),
690 W64LIT(0x113f9804bef90dae), W64LIT(0x1b710b35131c471b),
691 W64LIT(0x28db77f523047d84), W64LIT(0x32caab7b40c72493),
692 W64LIT(0x3c9ebe0a15c9bebc), W64LIT(0x431d67c49c100d4c),
693 W64LIT(0x4cc5d4becb3e42b6), W64LIT(0x597f299cfc657e2a),
694 W64LIT(0x5fcb6fab3ad6faec), W64LIT(0x6c44198c4a475817)
// SHA512_SSE2_Transform: SHA-512 compression for 32-bit x86/x32 using
// SSE2/MMX 64-bit operations (naked function, hand-managed stack frame).
// NOTE(review): many interior lines are missing from this extraction
// (stack-frame setup, SHA512_Round subroutine, loop labels); kept byte-identical.
697 #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) 699 CRYPTOPP_NAKED
static void CRYPTOPP_FASTCALL SHA512_SSE2_Transform(word64 *state,
const word64 *data)
711 AS2( lea ebx, SHA512_K)
715 AS2( and esp, 0xfffffff0)
720 #
if CRYPTOPP_BOOL_X32
721 AS2( lea edi, [esp+8+8*8])
722 AS2( lea esi, [esp+8+20*8+8])
724 AS2( lea edi, [esp+4+8*8])
725 AS2( lea esi, [esp+4+20*8+8])
// Copy the incoming state (ecx, per the "c" constraint below) into the
// 16-byte-aligned local working area at edi; mm4/mm5 cache two state words.
728 AS2( movdqa xmm0, [ecx+0*16])
729 AS2( movdq2q mm4, xmm0)
730 AS2( movdqa [edi+0*16], xmm0)
731 AS2( movdqa xmm0, [ecx+1*16])
732 AS2( movdqa [edi+1*16], xmm0)
733 AS2( movdqa xmm0, [ecx+2*16])
734 AS2( movdq2q mm5, xmm0)
735 AS2( movdqa [edi+2*16], xmm0)
736 AS2( movdqa xmm0, [ecx+3*16])
737 AS2( movdqa [edi+3*16], xmm0)
// 64-bit rotates are synthesized from shift+xor (psllq/pxor) since MMX/SSE2
// have no 64-bit rotate: SSE2_S0_S1 evaluates a big-Sigma on an MMX register;
// SSE2_s0/SSE2_s1 evaluate the small-sigma functions on XMM register pairs.
740 #define SSE2_S0_S1(r, a, b, c) \
744 AS2( psllq mm6, 64-c)\
748 AS2( psllq mm6, c-b)\
752 AS2( psllq mm6, b-a)\
755 #define SSE2_s0(r, a, b, c) \
756 AS2( movdqa xmm6, r)\
758 AS2( movdqa xmm7, r)\
759 AS2( psllq xmm6, 64-c)\
760 AS2( pxor xmm7, xmm6)\
765 AS2( psllq xmm6, c-a)\
768 #define SSE2_s1(r, a, b, c) \
769 AS2( movdqa xmm6, r)\
771 AS2( movdqa xmm7, r)\
772 AS2( psllq xmm6, 64-c)\
773 AS2( pxor xmm7, xmm6)\
776 AS2( psllq xmm6, c-b)\
777 AS2( pxor xmm7, xmm6)\
783 AS2( paddq mm0, [edi+7*8])
784 AS2( movq mm2, [edi+5*8])
785 AS2( movq mm3, [edi+6*8])
788 SSE2_S0_S1(mm5,14,18,41)
792 AS2( movq mm2, [edi+1*8])
795 AS2( pand mm2, [edi+2*8])
799 AS2( paddq mm5, [edi+3*8])
800 AS2( movq [edi+3*8], mm5)
801 AS2( movq [edi+11*8], mm5)
802 SSE2_S0_S1(mm4,28,34,39)
804 AS2( movq [edi-8], mm4)
805 AS2( movq [edi+7*8], mm4)
// First 16 rounds: copy each message word into the double-buffered schedule
// (written at both esi and esi+16*8), add the round constant from ebx
// (SHA512_K), and call the shared SHA512_Round subroutine.
810 AS2( movq mm0, [edx+eax*8])
811 AS2( movq [esi+eax*8], mm0)
812 AS2( movq [esi+eax*8+16*8], mm0)
813 AS2( paddq mm0, [ebx+eax*8])
814 ASC( call, SHA512_Round)
// Remaining rounds: extend the schedule two words at a time with SSE2 —
// sigma1 of W[t-2] + W[t-7] + sigma0 of W[t-15] + W[t-16] (note the 16-2,
// 16-7, 16-15 offsets) — then run two rounds on the pair.
824 AS2( movdqu xmm0, [esi+(16-2)*8])
827 AS2( movdqu xmm3, [esi])
828 AS2( paddq xmm3, [esi+(16-7)*8])
829 AS2( movdqa xmm2, [esi+(16-15)*8])
830 SSE2_s1(xmm0, 6, 19, 61)
831 AS2( paddq xmm0, xmm3)
832 SSE2_s0(xmm2, 1, 7, 8)
833 AS2( paddq xmm0, xmm2)
834 AS2( movdq2q mm0, xmm0)
835 AS2( movhlps xmm1, xmm0)
836 AS2( paddq mm0, [ebx+eax*8])
837 AS2( movlps [esi], xmm0)
838 AS2( movlps [esi+8], xmm1)
839 AS2( movlps [esi+8*16], xmm0)
840 AS2( movlps [esi+8*17], xmm1)
842 ASC( call, SHA512_Round)
844 AS2( movdq2q mm0, xmm1)
845 AS2( paddq mm0, [ebx+eax*8+8])
846 ASC( call, SHA512_Round)
856 #
if CRYPTOPP_BOOL_X32
857 AS2( lea esi, [esp+8+20*8+8+esi*8])
859 AS2( lea esi, [esp+4+20*8+8+esi*8])
// Feed-forward: add the working state back into the caller's state array
// (paddq of [edi] into [ecx]), two 64-bit words per iteration.
865 #define SSE2_CombineState(i) \
866 AS2( movdqa xmm0, [edi+i*16])\
867 AS2( paddq xmm0, [ecx+i*16])\
868 AS2( movdqa [ecx+i*16], xmm0)
878 #
if defined(__GNUC__)
882 :
"a" (SHA512_K),
"c" (state),
"d" (data)
883 :
"%esi",
"%edi",
"memory",
"cc" 892 #endif // #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE 894 void SHA512::Transform(word64 *state,
const word64 *data)
// On 32-bit x86/x32 with SSE2, delegate one block to the hand-written SSE2
// transform above; otherwise fall through to the portable C++ rounds.
899 #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32) 902 SHA512_SSE2_Transform(state, data);
// SHA-512 Sigma/sigma functions (FIPS 180-4, section 4.1.3) and the round
// macro R; j?blk2:blk0 selects schedule extension vs. direct message load.
907 #define S0(x) (rotrFixed(x,28)^rotrFixed(x,34)^rotrFixed(x,39)) 908 #define S1(x) (rotrFixed(x,14)^rotrFixed(x,18)^rotrFixed(x,41)) 909 #define s0(x) (rotrFixed(x,1)^rotrFixed(x,8)^(x>>7)) 910 #define s1(x) (rotrFixed(x,19)^rotrFixed(x,61)^(x>>6)) 912 #define R(i) h(i)+=S1(e(i))+Ch(e(i),f(i),g(i))+SHA512_K[i+j]+(j?blk2(i):blk0(i));\ 913 d(i)+=h(i);h(i)+=S0(a(i))+Maj(a(i),b(i),c(i)) 918 memcpy(T, state,
sizeof(T));
// 80 rounds, 16 per j iteration; working variables rotate through the
// circular buffer T via the a(i)..h(i) index macros.
920 for (
unsigned int j=0; j<80; j+=16)
922 R( 0); R( 1); R( 2); R( 3);
923 R( 4); R( 5); R( 6); R( 7);
924 R( 8); R( 9); R(10); R(11);
925 R(12); R(13); R(14); R(15);
Library configuration file.
Classes and functions for secure memory allocations.
bool IsAlignedOn(const void *ptr, unsigned int alignment)
Determines whether ptr is aligned to a minimum value.
Fixed size stack-based SecBlock with 16-byte alignment.
#define CRYPTOPP_ASSERT(exp)
Debugging and diagnostic assertion.
Functions for CPU features and intrinsics.
Classes for SHA-1 and SHA-2 family of message digests.
bool HasSSE2()
Determines SSE2 availability.
Crypto++ library namespace.
byte ByteReverse(byte value)
Reverses bytes in an 8-bit value.