#if CRYPTOPP_MSC_VERSION
# pragma warning(disable: 4100 4731)
#endif

#ifndef CRYPTOPP_IMPORTS
#ifndef CRYPTOPP_GENERATE_X64_MASM

#define blk0(i) (W[i] = data[i])
#define blk1(i) (W[i&15] = rotlFixed(W[(i+13)&15]^W[(i+8)&15]^W[(i+2)&15]^W[i&15],1))

void SHA1::InitState(HashWordType *state)
{
    state[0] = 0x67452301L;
    state[1] = 0xEFCDAB89L;
    state[2] = 0x98BADCFEL;
    state[3] = 0x10325476L;
    state[4] = 0xC3D2E1F0L;
}
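// The SHA-1 round functions f1..f4 and the unrolled round macros R0..R4 below
// correspond to the four 20-step stages of FIPS 180-4 (Ch, Parity, Maj, Parity),
// each with its own additive constant.  blk0 reads the message block directly;
// blk1 recycles the 16-word circular schedule with a 1-bit rotate.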
#define f1(x,y,z) (z^(x&(y^z)))
#define f2(x,y,z) (x^y^z)
#define f3(x,y,z) ((x&y)|(z&(x|y)))
#define f4(x,y,z) (x^y^z)

#define R0(v,w,x,y,z,i) z+=f1(w,x,y)+blk0(i)+0x5A827999+rotlFixed(v,5);w=rotlFixed(w,30);
#define R1(v,w,x,y,z,i) z+=f1(w,x,y)+blk1(i)+0x5A827999+rotlFixed(v,5);w=rotlFixed(w,30);
#define R2(v,w,x,y,z,i) z+=f2(w,x,y)+blk1(i)+0x6ED9EBA1+rotlFixed(v,5);w=rotlFixed(w,30);
#define R3(v,w,x,y,z,i) z+=f3(w,x,y)+blk1(i)+0x8F1BBCDC+rotlFixed(v,5);w=rotlFixed(w,30);
#define R4(v,w,x,y,z,i) z+=f4(w,x,y)+blk1(i)+0xCA62C1D6+rotlFixed(v,5);w=rotlFixed(w,30);

void SHA1::Transform(word32 *state, const word32 *data)
{
    word32 W[16];
    /* Copy state[] to working vars */
    word32 a = state[0];
    word32 b = state[1];
    word32 c = state[2];
    word32 d = state[3];
    word32 e = state[4];
    /* 4 rounds of 20 operations each. Loop unrolled. */
    R0(a,b,c,d,e, 0); R0(e,a,b,c,d, 1); R0(d,e,a,b,c, 2); R0(c,d,e,a,b, 3);
    R0(b,c,d,e,a, 4); R0(a,b,c,d,e, 5); R0(e,a,b,c,d, 6); R0(d,e,a,b,c, 7);
    R0(c,d,e,a,b, 8); R0(b,c,d,e,a, 9); R0(a,b,c,d,e,10); R0(e,a,b,c,d,11);
    R0(d,e,a,b,c,12); R0(c,d,e,a,b,13); R0(b,c,d,e,a,14); R0(a,b,c,d,e,15);
    R1(e,a,b,c,d,16); R1(d,e,a,b,c,17); R1(c,d,e,a,b,18); R1(b,c,d,e,a,19);
    R2(a,b,c,d,e,20); R2(e,a,b,c,d,21); R2(d,e,a,b,c,22); R2(c,d,e,a,b,23);
    R2(b,c,d,e,a,24); R2(a,b,c,d,e,25); R2(e,a,b,c,d,26); R2(d,e,a,b,c,27);
    R2(c,d,e,a,b,28); R2(b,c,d,e,a,29); R2(a,b,c,d,e,30); R2(e,a,b,c,d,31);
    R2(d,e,a,b,c,32); R2(c,d,e,a,b,33); R2(b,c,d,e,a,34); R2(a,b,c,d,e,35);
    R2(e,a,b,c,d,36); R2(d,e,a,b,c,37); R2(c,d,e,a,b,38); R2(b,c,d,e,a,39);
    R3(a,b,c,d,e,40); R3(e,a,b,c,d,41); R3(d,e,a,b,c,42); R3(c,d,e,a,b,43);
    R3(b,c,d,e,a,44); R3(a,b,c,d,e,45); R3(e,a,b,c,d,46); R3(d,e,a,b,c,47);
    R3(c,d,e,a,b,48); R3(b,c,d,e,a,49); R3(a,b,c,d,e,50); R3(e,a,b,c,d,51);
    R3(d,e,a,b,c,52); R3(c,d,e,a,b,53); R3(b,c,d,e,a,54); R3(a,b,c,d,e,55);
    R3(e,a,b,c,d,56); R3(d,e,a,b,c,57); R3(c,d,e,a,b,58); R3(b,c,d,e,a,59);
    R4(a,b,c,d,e,60); R4(e,a,b,c,d,61); R4(d,e,a,b,c,62); R4(c,d,e,a,b,63);
    R4(b,c,d,e,a,64); R4(a,b,c,d,e,65); R4(e,a,b,c,d,66); R4(d,e,a,b,c,67);
    R4(c,d,e,a,b,68); R4(b,c,d,e,a,69); R4(a,b,c,d,e,70); R4(e,a,b,c,d,71);
    R4(d,e,a,b,c,72); R4(c,d,e,a,b,73); R4(b,c,d,e,a,74); R4(a,b,c,d,e,75);
    R4(e,a,b,c,d,76); R4(d,e,a,b,c,77); R4(c,d,e,a,b,78); R4(b,c,d,e,a,79);
    /* Add the working vars back into state[] */
    state[0] += a;
    state[1] += b;
    state[2] += c;
    state[3] += d;
    state[4] += e;
}
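// SHA-224 shares SHA-256's compression function; the two hashes differ only in
// their initial hash values and in how many words of the final state are emitted.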
void SHA224::InitState(HashWordType *state)
{
    static const word32 s[8] = {0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4};
    memcpy(state, s, sizeof(s));
}
void SHA256::InitState(HashWordType *state)
{
    static const word32 s[8] = {0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
    memcpy(state, s, sizeof(s));
}
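// SHA256_K holds the 64 round constants K_t of FIPS 180-4 (the first 32 bits of
// the fractional parts of the cube roots of the first 64 primes).  The SSE2
// assembly path indexes the table directly, so it is kept 16-byte aligned there.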
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
CRYPTOPP_ALIGN_DATA(16) extern const word32 SHA256_K[64] CRYPTOPP_SECTION_ALIGN16 = {
#else
extern const word32 SHA256_K[64] = {
#endif
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
    0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
    0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
    0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
    0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
    0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
    0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
    0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
    0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};
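// X86_SHA256_HashBlocks below is the hand-written x86/x64 implementation of the
// SHA-256 compression function.  The H(i)-style macros (A(i)..H(i)) address the
// eight working variables in a rotating frame on the stack, Wt(i) addresses the
// 16-entry circular message schedule, RB1 performs the schedule update on the
// fly, and ROUND expands one complete SHA-256 round.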
#endif // #ifndef CRYPTOPP_GENERATE_X64_MASM

#if defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_GENERATE_X64_MASM)

static void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(word32 *state, const word32 *data, size_t len
#if defined(_MSC_VER) && (_MSC_VER == 1200)
#endif
    )
{
#if defined(_MSC_VER) && (_MSC_VER == 1200)
    AS2( mov ecx, [state])
#define LOCALS_SIZE  8*4 + 16*4 + 4*WORD_SZ
#define H(i)         [BASE+ASM_MOD(1024+7-(i),8)*4]
#define Wt(i)        BASE+8*4+ASM_MOD(1024+15-(i),16)*4
#define Wt_2(i)      Wt((i)-2)
#define Wt_15(i)     Wt((i)-15)
#define Wt_7(i)      Wt((i)-7)
#define K_END        [BASE+8*4+16*4+0*WORD_SZ]
#define STATE_SAVE   [BASE+8*4+16*4+1*WORD_SZ]
#define DATA_SAVE    [BASE+8*4+16*4+2*WORD_SZ]
#define DATA_END     [BASE+8*4+16*4+3*WORD_SZ]
#define Kt(i)        WORD_REG(si)+(i)*4
#if CRYPTOPP_BOOL_X32
#elif CRYPTOPP_BOOL_X86
#elif defined(__GNUC__)
#endif

#define RA0(i, edx, edi) \
    AS2( add edx, [Kt(i)] )\
    AS2( add edx, [Wt(i)] )\
    AS2( add edx, H(i) )

#define RA1(i, edx, edi)

#define RB0(i, edx, edi)

#define RB1(i, edx, edi) \
    AS2( mov AS_REG_7d, [Wt_2(i)] )\
    AS2( mov edi, [Wt_15(i)])\
    AS2( mov ebx, AS_REG_7d )\
    AS2( shr AS_REG_7d, 10 )\
    AS2( xor AS_REG_7d, ebx )\
    AS2( xor ebx, AS_REG_7d )\
    AS2( add ebx, [Wt_7(i)])\
    AS2( mov AS_REG_7d, edi )\
    AS2( shr AS_REG_7d, 3 )\
    AS2( add ebx, [Wt(i)])\
    AS2( xor AS_REG_7d, edi )\
    AS2( add edx, [Kt(i)])\
    AS2( add edx, H(i) )\
    AS2( xor AS_REG_7d, edi )\
    AS2( add AS_REG_7d, ebx )\
    AS2( mov [Wt(i)], AS_REG_7d)\
    AS2( add edx, AS_REG_7d )

#define ROUND(i, r, eax, ecx, edi, edx)\
    AS2( mov edx, F(i) )\
    AS2( xor edx, G(i) )\
    AS2( xor edx, G(i) )\
    AS2( mov AS_REG_7d, edi )\
    AS2( ror AS_REG_7d, 25 )\
    AS2( xor AS_REG_7d, edi )\
    AS2( xor AS_REG_7d, edi )\
    AS2( add edx, AS_REG_7d )\
    AS2( xor ecx, B(i) )\
    AS2( xor eax, B(i) )\
    AS2( mov AS_REG_7d, ebx )\
    AS2( add edx, D(i) )\
    AS2( mov D(i), edx )\
    AS2( ror AS_REG_7d, 22 )\
    AS2( xor AS_REG_7d, ebx )\
    AS2( xor AS_REG_7d, ebx )\
    AS2( add eax, AS_REG_7d )\
    AS2( mov H(i), eax )

#if CRYPTOPP_BOOL_X64
#define SWAP_COPY(i) \
    AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\
    AS1( bswap WORD_REG(bx))\
    AS2( mov [Wt(i*2+1)], WORD_REG(bx))
#else
#define SWAP_COPY(i) \
    AS2( mov WORD_REG(bx), [WORD_REG(dx)+i*WORD_SZ])\
    AS1( bswap WORD_REG(bx))\
    AS2( mov [Wt(i)], WORD_REG(bx))
#endif

#if defined(__GNUC__)
    #if CRYPTOPP_BOOL_X64
    #endif
    #if CRYPTOPP_BOOL_X64
    #endif
#elif defined(CRYPTOPP_GENERATE_X64_MASM)
X86_SHA256_HashBlocks PROC FRAME
    alloc_stack(LOCALS_SIZE+8)
    lea rsi, [?SHA256_K@CryptoPP@@3QBIB + 48*4]
#endif
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    AS2( lea WORD_REG(si), [SHA256_K+48*4])
    #if !defined(_MSC_VER) || (_MSC_VER < 1400)
    #endif
    AS2( sub WORD_REG(sp), LOCALS_SIZE)
#endif
    AS2( mov STATE_SAVE, WORD_REG(cx))
    AS2( mov DATA_SAVE, WORD_REG(dx))
    AS2( lea WORD_REG(ax), [WORD_REG(di) + WORD_REG(dx)])
    AS2( mov DATA_END, WORD_REG(ax))
    AS2( mov K_END, WORD_REG(si))
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
    #if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    #endif
    AS1( dec DWORD PTR K_END)
    AS2( movdqa xmm0, XMMWORD_PTR [WORD_REG(cx)+0*16])
    AS2( movdqa xmm1, XMMWORD_PTR [WORD_REG(cx)+1*16])
#endif
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
    #endif
#endif

#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
    AS2( movdqa E(0), xmm1)
    AS2( movdqa A(0), xmm0)
#endif
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
#endif
    AS2( sub WORD_REG(si), 48*4)
    SWAP_COPY(0) SWAP_COPY(1) SWAP_COPY(2) SWAP_COPY(3)
    SWAP_COPY(4) SWAP_COPY(5) SWAP_COPY(6) SWAP_COPY(7)
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    SWAP_COPY(8) SWAP_COPY(9) SWAP_COPY(10) SWAP_COPY(11)
    SWAP_COPY(12) SWAP_COPY(13) SWAP_COPY(14) SWAP_COPY(15)
#endif
    ROUND(0, 0, eax, ecx, edi, edx)
    ROUND(1, 0, ecx, eax, edx, edi)
    ROUND(2, 0, eax, ecx, edi, edx)
    ROUND(3, 0, ecx, eax, edx, edi)
    ROUND(4, 0, eax, ecx, edi, edx)
    ROUND(5, 0, ecx, eax, edx, edi)
    ROUND(6, 0, eax, ecx, edi, edx)
    ROUND(7, 0, ecx, eax, edx, edi)
    ROUND(8, 0, eax, ecx, edi, edx)
    ROUND(9, 0, ecx, eax, edx, edi)
    ROUND(10, 0, eax, ecx, edi, edx)
    ROUND(11, 0, ecx, eax, edx, edi)
    ROUND(12, 0, eax, ecx, edi, edx)
    ROUND(13, 0, ecx, eax, edx, edi)
    ROUND(14, 0, eax, ecx, edi, edx)
    ROUND(15, 0, ecx, eax, edx, edi)
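    // The 16 rounds above consume the byte-swapped message block (the RA0/RB0
    // variants of ROUND); the ROUND(...,1,...) group below advances the
    // round-constant pointer and recomputes the message schedule in place
    // (RA1/RB1), looping until all 64 constants have been used.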
    AS2( add WORD_REG(si), 4*16)
    ROUND(0, 1, eax, ecx, edi, edx)
    ROUND(1, 1, ecx, eax, edx, edi)
    ROUND(2, 1, eax, ecx, edi, edx)
    ROUND(3, 1, ecx, eax, edx, edi)
    ROUND(4, 1, eax, ecx, edi, edx)
    ROUND(5, 1, ecx, eax, edx, edi)
    ROUND(6, 1, eax, ecx, edi, edx)
    ROUND(7, 1, ecx, eax, edx, edi)
    ROUND(8, 1, eax, ecx, edi, edx)
    ROUND(9, 1, ecx, eax, edx, edi)
    ROUND(10, 1, eax, ecx, edi, edx)
    ROUND(11, 1, ecx, eax, edx, edi)
    ROUND(12, 1, eax, ecx, edi, edx)
    ROUND(13, 1, ecx, eax, edx, edi)
    ROUND(14, 1, eax, ecx, edi, edx)
    ROUND(15, 1, ecx, eax, edx, edi)
    AS2( cmp WORD_REG(si), K_END)
    AS2( mov WORD_REG(dx), DATA_SAVE)
    AS2( add WORD_REG(dx), 64)
    AS2( mov AS_REG_7, STATE_SAVE)
    AS2( mov DATA_SAVE, WORD_REG(dx))
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
    #if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
        AS2( test DWORD PTR K_END, 1)
    #endif
    AS2( movdqa xmm1, XMMWORD_PTR [AS_REG_7+1*16])
    AS2( movdqa xmm0, XMMWORD_PTR [AS_REG_7+0*16])
    AS2( paddd xmm1, E(0))
    AS2( paddd xmm0, A(0))
    AS2( movdqa [AS_REG_7+1*16], xmm1)
    AS2( movdqa [AS_REG_7+0*16], xmm0)
    AS2( cmp WORD_REG(dx), DATA_END)
#endif
#if CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
    #endif
    AS2( add [AS_REG_7+0*4], ecx)
    AS2( add [AS_REG_7+4*4], edi)
    AS2( add [AS_REG_7+1*4], eax)
    AS2( add [AS_REG_7+2*4], ebx)
    AS2( add [AS_REG_7+3*4], ecx)
    AS2( add [AS_REG_7+5*4], eax)
    AS2( add [AS_REG_7+6*4], ebx)
    AS2( add [AS_REG_7+7*4], ecx)
    AS2( mov ecx, AS_REG_7d)
    AS2( cmp WORD_REG(dx), DATA_END)
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE
#endif
#endif  // CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32
    #if !defined(_MSC_VER) || (_MSC_VER < 1400)
    #endif
#ifdef CRYPTOPP_GENERATE_X64_MASM
    add rsp, LOCALS_SIZE+8
    X86_SHA256_HashBlocks ENDP
#endif
    : "c" (state), "d" (data), "S" (SHA256_K+48), "D" (len)
    #if CRYPTOPP_BOOL_X64
    #endif
    : "memory", "cc", "%eax"
    #if CRYPTOPP_BOOL_X64
        , "%rbx", "%r8", "%r10"
    #endif
    );
#endif
}

#endif  // (defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_GENERATE_X64_MASM))

#ifndef CRYPTOPP_GENERATE_X64_MASM

#ifdef CRYPTOPP_X64_MASM_AVAILABLE
void CRYPTOPP_FASTCALL X86_SHA256_HashBlocks(word32 *state, const word32 *data, size_t len);
#endif
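// On builds with an assembly back end, HashMultipleBlocks feeds whole 64-byte
// blocks straight to X86_SHA256_HashBlocks and reports the unprocessed tail.
// (length & (size_t(0)-BLOCKSIZE)) rounds length down to a multiple of
// BLOCKSIZE; the extra "- !HasSSE2()" nudge is a flag the assembly uses to
// choose between its SSE2 and non-SSE2 code paths.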
#if defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_X64_MASM_AVAILABLE)

size_t SHA256::HashMultipleBlocks(const word32 *input, size_t length)
{
    X86_SHA256_HashBlocks(m_state, input, (length&(size_t(0)-BLOCKSIZE)) - !HasSSE2());
    return length % BLOCKSIZE;
}
size_t SHA224::HashMultipleBlocks(const word32 *input, size_t length)
{
    X86_SHA256_HashBlocks(m_state, input, (length&(size_t(0)-BLOCKSIZE)) - !HasSSE2());
    return length % BLOCKSIZE;
}

#endif
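// Portable C++ implementation.  blk2 performs the SHA-256 message-schedule
// update in a 16-word circular buffer, Ch and Maj are the usual choice/majority
// functions, and the a(i)..h(i) macros rotate the eight working variables
// through the T[] array so the unrolled R(i) rounds never shuffle them explicitly.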
#define blk2(i) (W[i&15]+=s1(W[(i-2)&15])+W[(i-7)&15]+s0(W[(i-15)&15]))

#define Ch(x,y,z) (z^(x&(y^z)))
#define Maj(x,y,z) (y^((x^y)&(y^z)))

#define a(i) T[(0-i)&7]
#define b(i) T[(1-i)&7]
#define c(i) T[(2-i)&7]
#define d(i) T[(3-i)&7]
#define e(i) T[(4-i)&7]
#define f(i) T[(5-i)&7]
#define g(i) T[(6-i)&7]
#define h(i) T[(7-i)&7]

#define R(i) h(i)+=S1(e(i))+Ch(e(i),f(i),g(i))+SHA256_K[i+j]+(j?blk2(i):blk0(i));\
    d(i)+=h(i);h(i)+=S0(a(i))+Maj(a(i),b(i),c(i))

// for SHA256
#define S0(x) (rotrFixed(x,2)^rotrFixed(x,13)^rotrFixed(x,22))
#define S1(x) (rotrFixed(x,6)^rotrFixed(x,11)^rotrFixed(x,25))
#define s0(x) (rotrFixed(x,7)^rotrFixed(x,18)^(x>>3))
#define s1(x) (rotrFixed(x,17)^rotrFixed(x,19)^(x>>10))

void SHA256::Transform(word32 *state, const word32 *data)
{
    word32 W[16];
#if defined(CRYPTOPP_X86_ASM_AVAILABLE) || defined(CRYPTOPP_X32_ASM_AVAILABLE) || defined(CRYPTOPP_X64_MASM_AVAILABLE)
    X86_SHA256_HashBlocks(state, W, BLOCKSIZE - !HasSSE2());
#else
    word32 T[8];
    /* Copy context->state[] to working vars */
    memcpy(T, state, sizeof(T));
    /* 64 operations, partially loop unrolled */
    for (unsigned int j=0; j<64; j+=16)
    {
        R( 0); R( 1); R( 2); R( 3);
        R( 4); R( 5); R( 6); R( 7);
        R( 8); R( 9); R(10); R(11);
        R(12); R(13); R(14); R(15);
    }
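// SHA-384 and SHA-512 use the same compression function over 64-bit words;
// SHA-384 differs only in its initial hash values and in truncating the output.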
void SHA384::InitState(HashWordType *state)
{
    static const word64 s[8] = {
        W64LIT(0xcbbb9d5dc1059ed8), W64LIT(0x629a292a367cd507),
        W64LIT(0x9159015a3070dd17), W64LIT(0x152fecd8f70e5939),
        W64LIT(0x67332667ffc00b31), W64LIT(0x8eb44a8768581511),
        W64LIT(0xdb0c2e0d64f98fa7), W64LIT(0x47b5481dbefa4fa4)};
    memcpy(state, s, sizeof(s));
}
void SHA512::InitState(HashWordType *state)
{
    static const word64 s[8] = {
        W64LIT(0x6a09e667f3bcc908), W64LIT(0xbb67ae8584caa73b),
        W64LIT(0x3c6ef372fe94f82b), W64LIT(0xa54ff53a5f1d36f1),
        W64LIT(0x510e527fade682d1), W64LIT(0x9b05688c2b3e6c1f),
        W64LIT(0x1f83d9abfb41bd6b), W64LIT(0x5be0cd19137e2179)};
    memcpy(state, s, sizeof(s));
}
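// SHA512_K holds the 80 round constants K_t (the first 64 bits of the fractional
// parts of the cube roots of the first 80 primes), kept 16-byte aligned when the
// SSE2 transform below is compiled in.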
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32)
CRYPTOPP_ALIGN_DATA(16) static const word64 SHA512_K[80] CRYPTOPP_SECTION_ALIGN16 = {
#else
static const word64 SHA512_K[80] = {
#endif
    W64LIT(0x428a2f98d728ae22), W64LIT(0x7137449123ef65cd),
    W64LIT(0xb5c0fbcfec4d3b2f), W64LIT(0xe9b5dba58189dbbc),
    W64LIT(0x3956c25bf348b538), W64LIT(0x59f111f1b605d019),
    W64LIT(0x923f82a4af194f9b), W64LIT(0xab1c5ed5da6d8118),
    W64LIT(0xd807aa98a3030242), W64LIT(0x12835b0145706fbe),
    W64LIT(0x243185be4ee4b28c), W64LIT(0x550c7dc3d5ffb4e2),
    W64LIT(0x72be5d74f27b896f), W64LIT(0x80deb1fe3b1696b1),
    W64LIT(0x9bdc06a725c71235), W64LIT(0xc19bf174cf692694),
    W64LIT(0xe49b69c19ef14ad2), W64LIT(0xefbe4786384f25e3),
    W64LIT(0x0fc19dc68b8cd5b5), W64LIT(0x240ca1cc77ac9c65),
    W64LIT(0x2de92c6f592b0275), W64LIT(0x4a7484aa6ea6e483),
    W64LIT(0x5cb0a9dcbd41fbd4), W64LIT(0x76f988da831153b5),
    W64LIT(0x983e5152ee66dfab), W64LIT(0xa831c66d2db43210),
    W64LIT(0xb00327c898fb213f), W64LIT(0xbf597fc7beef0ee4),
    W64LIT(0xc6e00bf33da88fc2), W64LIT(0xd5a79147930aa725),
    W64LIT(0x06ca6351e003826f), W64LIT(0x142929670a0e6e70),
    W64LIT(0x27b70a8546d22ffc), W64LIT(0x2e1b21385c26c926),
    W64LIT(0x4d2c6dfc5ac42aed), W64LIT(0x53380d139d95b3df),
    W64LIT(0x650a73548baf63de), W64LIT(0x766a0abb3c77b2a8),
    W64LIT(0x81c2c92e47edaee6), W64LIT(0x92722c851482353b),
    W64LIT(0xa2bfe8a14cf10364), W64LIT(0xa81a664bbc423001),
    W64LIT(0xc24b8b70d0f89791), W64LIT(0xc76c51a30654be30),
    W64LIT(0xd192e819d6ef5218), W64LIT(0xd69906245565a910),
    W64LIT(0xf40e35855771202a), W64LIT(0x106aa07032bbd1b8),
    W64LIT(0x19a4c116b8d2d0c8), W64LIT(0x1e376c085141ab53),
    W64LIT(0x2748774cdf8eeb99), W64LIT(0x34b0bcb5e19b48a8),
    W64LIT(0x391c0cb3c5c95a63), W64LIT(0x4ed8aa4ae3418acb),
    W64LIT(0x5b9cca4f7763e373), W64LIT(0x682e6ff3d6b2b8a3),
    W64LIT(0x748f82ee5defb2fc), W64LIT(0x78a5636f43172f60),
    W64LIT(0x84c87814a1f0ab72), W64LIT(0x8cc702081a6439ec),
    W64LIT(0x90befffa23631e28), W64LIT(0xa4506cebde82bde9),
    W64LIT(0xbef9a3f7b2c67915), W64LIT(0xc67178f2e372532b),
    W64LIT(0xca273eceea26619c), W64LIT(0xd186b8c721c0c207),
    W64LIT(0xeada7dd6cde0eb1e), W64LIT(0xf57d4f7fee6ed178),
    W64LIT(0x06f067aa72176fba), W64LIT(0x0a637dc5a2c898a6),
    W64LIT(0x113f9804bef90dae), W64LIT(0x1b710b35131c471b),
    W64LIT(0x28db77f523047d84), W64LIT(0x32caab7b40c72493),
    W64LIT(0x3c9ebe0a15c9bebc), W64LIT(0x431d67c49c100d4c),
    W64LIT(0x4cc5d4becb3e42b6), W64LIT(0x597f299cfc657e2a),
    W64LIT(0x5fcb6fab3ad6faec), W64LIT(0x6c44198c4a475817)
};
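// SHA512_SSE2_Transform is a 32-bit x86/x32 implementation that keeps the
// 64-bit working variables and message schedule in MMX and SSE2 registers,
// since the general-purpose registers are only 32 bits wide on that target.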
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32)

CRYPTOPP_NAKED static void CRYPTOPP_FASTCALL SHA512_SSE2_Transform(word64 *state, const word64 *data)
{
    AS2( lea ebx, SHA512_K)
    AS2( and esp, 0xfffffff0)
#if CRYPTOPP_BOOL_X32
    AS2( lea edi, [esp+8+8*8])
    AS2( lea esi, [esp+8+20*8+8])
#else
    AS2( lea edi, [esp+4+8*8])
    AS2( lea esi, [esp+4+20*8+8])
#endif
    AS2( movdqa xmm0, [ecx+0*16])
    AS2( movdq2q mm4, xmm0)
    AS2( movdqa [edi+0*16], xmm0)
    AS2( movdqa xmm0, [ecx+1*16])
    AS2( movdqa [edi+1*16], xmm0)
    AS2( movdqa xmm0, [ecx+2*16])
    AS2( movdq2q mm5, xmm0)
    AS2( movdqa [edi+2*16], xmm0)
    AS2( movdqa xmm0, [ecx+3*16])
    AS2( movdqa [edi+3*16], xmm0)
#define SSE2_S0_S1(r, a, b, c) \
    AS2( psllq mm6, 64-c)\
    AS2( psllq mm6, c-b)\
    AS2( psllq mm6, b-a)
#define SSE2_s0(r, a, b, c) \
    AS2( movdqa xmm6, r)\
    AS2( movdqa xmm7, r)\
    AS2( psllq xmm6, 64-c)\
    AS2( pxor xmm7, xmm6)\
    AS2( psllq xmm6, c-a)
#define SSE2_s1(r, a, b, c) \
    AS2( movdqa xmm6, r)\
    AS2( movdqa xmm7, r)\
    AS2( psllq xmm6, 64-c)\
    AS2( pxor xmm7, xmm6)\
    AS2( psllq xmm6, c-b)\
    AS2( pxor xmm7, xmm6)
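// SSE2_S0_S1 evaluates either big-sigma function on a 64-bit working variable
// held in an MMX register (the three rotation amounts are passed in, e.g.
// 14,18,41 for Sigma1 and 28,34,39 for Sigma0); SSE2_s0 and SSE2_s1 compute the
// small-sigma schedule functions on two schedule words at once in an XMM
// register, building the 64-bit rotates from shifts and XORs.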
    AS2( paddq mm0, [edi+7*8])
    AS2( movq mm2, [edi+5*8])
    AS2( movq mm3, [edi+6*8])
    SSE2_S0_S1(mm5,14,18,41)
    AS2( movq mm2, [edi+1*8])
    AS2( pand mm2, [edi+2*8])
    AS2( paddq mm5, [edi+3*8])
    AS2( movq [edi+3*8], mm5)
    AS2( movq [edi+11*8], mm5)
    SSE2_S0_S1(mm4,28,34,39)
    AS2( movq [edi-8], mm4)
    AS2( movq [edi+7*8], mm4)
    AS2( movq mm0, [edx+eax*8])
    AS2( movq [esi+eax*8], mm0)
    AS2( movq [esi+eax*8+16*8], mm0)
    AS2( paddq mm0, [ebx+eax*8])
    ASC( call, SHA512_Round)
    AS2( movdqu xmm0, [esi+(16-2)*8])
    AS2( movdqu xmm3, [esi])
    AS2( paddq xmm3, [esi+(16-7)*8])
    AS2( movdqa xmm2, [esi+(16-15)*8])
    SSE2_s1(xmm0, 6, 19, 61)
    AS2( paddq xmm0, xmm3)
    SSE2_s0(xmm2, 1, 7, 8)
    AS2( paddq xmm0, xmm2)
    AS2( movdq2q mm0, xmm0)
    AS2( movhlps xmm1, xmm0)
    AS2( paddq mm0, [ebx+eax*8])
    AS2( movlps [esi], xmm0)
    AS2( movlps [esi+8], xmm1)
    AS2( movlps [esi+8*16], xmm0)
    AS2( movlps [esi+8*17], xmm1)
    ASC( call, SHA512_Round)
    AS2( movdq2q mm0, xmm1)
    AS2( paddq mm0, [ebx+eax*8+8])
    ASC( call, SHA512_Round)
#if CRYPTOPP_BOOL_X32
    AS2( lea esi, [esp+8+20*8+8+esi*8])
#else
    AS2( lea esi, [esp+4+20*8+8+esi*8])
#endif
#define SSE2_CombineState(i) \
    AS2( movdqa xmm0, [edi+i*16])\
    AS2( paddq xmm0, [ecx+i*16])\
    AS2( movdqa [ecx+i*16], xmm0)
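// SSE2_CombineState folds the saved working state back into the caller's hash
// state, two 64-bit words per XMM register (state[i] += working[i]).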
#if defined(__GNUC__)
    : "a" (SHA512_K), "c" (state), "d" (data)
    : "%esi", "%edi", "memory", "cc"
    );
#endif
}

#endif  // #if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE

void SHA512::Transform(word64 *state, const word64 *data)
{
#if CRYPTOPP_BOOL_SSE2_ASM_AVAILABLE && (CRYPTOPP_BOOL_X86 || CRYPTOPP_BOOL_X32)
    if (HasSSE2())
    {
        SHA512_SSE2_Transform(state, data);
        return;
    }
#endif
// for SHA512
#define S0(x) (rotrFixed(x,28)^rotrFixed(x,34)^rotrFixed(x,39))
#define S1(x) (rotrFixed(x,14)^rotrFixed(x,18)^rotrFixed(x,41))
#define s0(x) (rotrFixed(x,1)^rotrFixed(x,8)^(x>>7))
#define s1(x) (rotrFixed(x,19)^rotrFixed(x,61)^(x>>6))

#define R(i) h(i)+=S1(e(i))+Ch(e(i),f(i),g(i))+SHA512_K[i+j]+(j?blk2(i):blk0(i));\
    d(i)+=h(i);h(i)+=S0(a(i))+Maj(a(i),b(i),c(i))

    word64 W[16];
    word64 T[8];
    /* Copy context->state[] to working vars */
    memcpy(T, state, sizeof(T));
    /* 80 operations, partially loop unrolled */
    for (unsigned int j=0; j<80; j+=16)
    {
        R( 0); R( 1); R( 2); R( 3);
        R( 4); R( 5); R( 6); R( 7);
        R( 8); R( 9); R(10); R(11);
        R(12); R(13); R(14); R(15);
    }
#endif  // #ifndef CRYPTOPP_GENERATE_X64_MASM
#endif  // #ifndef CRYPTOPP_IMPORTS
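// Minimal usage sketch (hedged example; these classes implement the standard
// HashTransformation interface declared in sha.h):
//
//   CryptoPP::SHA256 hash;
//   unsigned char digest[CryptoPP::SHA256::DIGESTSIZE];
//   hash.CalculateDigest(digest, reinterpret_cast<const unsigned char*>("abc"), 3);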