Created: March 3, 2011 21:08
-
-
Save anonymous/853566 to your computer and use it in GitHub Desktop.
cpuminer 0.7.1 sha256_4way.c compiled with icc
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Compiler-generated assembly listing (Intel icc 12.0, AT&T/GAS syntax,
# x86-64 / System V AMD64 ABI). Build banner and original compile options
# are preserved below; the stray " | |" column artifacts left over from
# the web-page extraction have been removed so the file assembles.
# -- Machine type EFI2
# mark_description "Intel(R) C Intel(R) 64 Compiler XE for applications running on Intel(R) 64, Version 12.0.1.107 Build 2010111";
# mark_description "6";
# mark_description "-I. -I./compat/jansson -I ../curl/include -fsource-asm -S -DHAVE_CONFIG_H -pthread -fno-st";
# mark_description "rict-aliasing -O2 -msse3 -MT sha256_4way.o -MD -MP -MF .deps/sha256_4way.Tpo -c";
        .file   "sha256_4way.c"
        .text
..TXTST0:
# -- Begin ScanHash_4WaySSE2
# mark_begin;
#-----------------------------------------------------------------------
# unsigned int ScanHash_4WaySSE2(pmidstate, pdata, phash1, phash,
#                                ptarget, max_nonce, nHashesDone)
# icc-generated code, System V AMD64 ABI (argument/variable names below
# are recovered from the interleaved "###" source echoes).
# In:   %rdi = pmidstate, %rsi = pdata, %rdx = phash1, %rcx = phash,
#       %r8  = ptarget,   %r9d = max_nonce,
#       16(%rbp) = nHashesDone (7th argument, passed on the stack).
# Out:  %eax = nonce+j of a hash that passed fulltest(), or -1 when
#       nonce reaches max_nonce; *nHashesDone receives the nonce count,
#       and pdata[12..15] (*nNonce_p) is updated with the nonce.
# Note: %rsp is aligned down to a 128-byte boundary in the prologue for
#       the aligned(128) thash[9][NPAR] scratch array kept at (%rsp);
#       NPAR is 32 here (row stride 128 bytes), so thash[k][j] lives at
#       k*128(%rsp,j,4). Callee-saved %rbx/%r12-%r15 are preserved.
#       The listing was rebuilt from a web extraction: the trailing
#       " | |" artifacts were removed; every instruction is unchanged.
#-----------------------------------------------------------------------
        .align  16,0x90
        .globl  ScanHash_4WaySSE2
ScanHash_4WaySSE2:
# parameter 1: %rdi
# parameter 2: %rsi
# parameter 3: %rdx
# parameter 4: %rcx
# parameter 5: %r8
# parameter 6: %r9d
# parameter 7: 16 + %rbp
..B1.1:                                 # Preds ..B1.0
### {
..___tag_value_ScanHash_4WaySSE2.1:     #107.1
        pushq   %rbp                    #107.1
..___tag_value_ScanHash_4WaySSE2.3:     #
        movq    %rsp, %rbp              #107.1
..___tag_value_ScanHash_4WaySSE2.4:     #
        andq    $-128, %rsp             #107.1   align stack for aligned(128) thash
        pushq   %r12                    #107.1
..___tag_value_ScanHash_4WaySSE2.6:     #
        pushq   %r13                    #107.1
..___tag_value_ScanHash_4WaySSE2.7:     #
        pushq   %r14                    #107.1
..___tag_value_ScanHash_4WaySSE2.8:     #
        pushq   %r15                    #107.1
..___tag_value_ScanHash_4WaySSE2.9:     #
        pushq   %rbx                    #107.1
..___tag_value_ScanHash_4WaySSE2.10:    #
        subq    $1240, %rsp             #107.1   locals: thash + spill slots
### unsigned int *nNonce_p = (unsigned int*)(pdata + 12);
### unsigned int nonce = 0;
        xorl    %eax, %eax              #109.24  nonce = 0
        movq    %rdx, %r14              #109.24  r14 = phash1
        movq    16(%rbp), %r10          #107.1   r10 = nHashesDone (stack arg)
        movl    %eax, %ebx              #109.24  ebx = nonce
        movq    %rcx, 1176(%rsp)        #109.24  spill phash
        movq    %rsi, %r15              #109.24  r15 = pdata
        movq    %r8, 1184(%rsp)         #109.24  spill ptarget
        movq    %rdi, %r12              #109.24  r12 = pmidstate
        movl    %r9d, %r13d             #109.24  r13d = max_nonce
                                        # LOE r12 r14 r15 ebx r13d
..B1.2:                                 # Preds ..B1.6 ..B1.1
###
### for (;;)
### {
### unsigned int thash[9][NPAR] __attribute__((aligned(128)));
### int j;
###
### nonce += NPAR;
### *nNonce_p = nonce;
###
### DoubleBlockSHA256(pdata, phash1, pmidstate, thash, pSHA256InitState);
        movq    %r15, %rdi              #119.9   arg1 = pdata
        movq    %r14, %rsi              #119.9   arg2 = phash1
        movq    %r12, %rdx              #119.9   arg3 = pmidstate
        lea     (%rsp), %rcx            #119.9   arg4 = thash scratch array
        movl    $pSHA256InitState, %r8d #119.9   arg5 (32-bit absolute; non-PIC code)
        addl    $32, %ebx               #116.2   nonce += NPAR (NPAR == 32)
        movl    %ebx, 12(%r15)          #117.3   *nNonce_p = nonce
        call    DoubleBlockSHA256       #119.9
                                        # LOE r12 r14 r15 ebx r13d
..B1.3:                                 # Preds ..B1.2
###
### for (j = 0; j < NPAR; j++)
        xorl    %edx, %edx              #121.14  j = 0
        movq    %r15, 1168(%rsp)        #121.14  spill pdata for inner loop
        movq    %r14, 1160(%rsp)        #121.14  spill phash1
        movl    %r13d, 1152(%rsp)       #121.14  spill max_nonce
        movq    %rdx, %r13              #121.14  r13 = j (register reused)
        movq    1176(%rsp), %r15        #121.14  r15 = phash
        movq    1184(%rsp), %r14        #121.14  r14 = ptarget
                                        # LOE r12 r13 r14 r15 ebx
..B1.4:                                 # Preds ..B1.5 ..B1.3
### {
### if (unlikely(thash[7][j] == 0))
        movl    896(%rsp,%r13,4), %esi  #123.17  thash[7][j] (7 * 128 = 896)
        testl   %esi, %esi              #123.17
        je      ..B1.8                  # Prob 5% #123.17  candidate hit
                                        # LOE r12 r13 r14 r15 ebx esi
..B1.5:                                 # Preds ..B1.4 ..B1.9
        incq    %r13                    #121.31  j++
        cmpq    $32, %r13               #121.25  j < NPAR
        jl      ..B1.4                  # Prob 96% #121.25
                                        # LOE r12 r13 r14 r15 ebx
..B1.6:                                 # Preds ..B1.5
        movl    1152(%rsp), %r13d       #        reload max_nonce
### {
### int i;
###
### for (i = 0; i < 32/4; i++)
### ((unsigned int*)phash)[i] = thash[i][j];
###
### if (fulltest(phash, ptarget)) {
### *nHashesDone = nonce;
### *nNonce_p = nonce + j;
### return nonce + j;
### }
### }
### }
###
### if (nonce >= max_nonce)
        cmpl    %r13d, %ebx             #138.22  unsigned compare: nonce vs max_nonce
        movq    1168(%rsp), %r15        #        reload pdata
        movq    1160(%rsp), %r14        #        reload phash1
        jb      ..B1.2                  # Prob 80% #138.22  nonce < max_nonce: keep scanning
                                        # LOE r12 r14 r15 ebx r13d
..B1.7:                                 # Preds ..B1.6
# Exhausted: report hashes done and return (unsigned)-1.
        movl    %ebx, %eax              #        eax = nonce (zero-extends to rax)
        movq    16(%rbp), %r10          #        r10 = nHashesDone
### {
### *nHashesDone = nonce;
        movq    %rax, (%r10)            #140.14
### return -1;
        movl    $-1, %eax               #141.20
        addq    $1240, %rsp             #141.20
..___tag_value_ScanHash_4WaySSE2.11:    #141.20
        popq    %rbx                    #141.20
..___tag_value_ScanHash_4WaySSE2.12:    #141.20
        popq    %r15                    #141.20
..___tag_value_ScanHash_4WaySSE2.13:    #141.20
        popq    %r14                    #141.20
..___tag_value_ScanHash_4WaySSE2.14:    #141.20
        popq    %r13                    #141.20
..___tag_value_ScanHash_4WaySSE2.15:    #141.20
        popq    %r12                    #141.20
        movq    %rbp, %rsp              #141.20
        popq    %rbp                    #141.20
..___tag_value_ScanHash_4WaySSE2.16:    #
        ret                             #141.20
..___tag_value_ScanHash_4WaySSE2.18:    #
                                        # LOE
..B1.8:                                 # Preds ..B1.4 # Infreq
# Candidate hit: copy thash[0..7][j] into phash words 0..7, then verify
# against the full target with fulltest(phash, ptarget).
        movl    (%rsp,%r13,4), %ecx     #128.49  thash[0][j]
        movq    %r15, %rdi              #130.7   arg1 = phash
        movl    %ecx, (%r15)            #103.14  phash[0]
        movl    640(%rsp,%r13,4), %ecx  #128.49  thash[5][j]
        movl    %ecx, 20(%r15)          #103.14  phash[5]
        movl    128(%rsp,%r13,4), %r8d  #128.49  thash[1][j]
        movl    256(%rsp,%r13,4), %r9d  #128.49  thash[2][j]
        movl    384(%rsp,%r13,4), %r10d #128.49  thash[3][j]
        movl    512(%rsp,%r13,4), %r11d #128.49  thash[4][j]
        movl    768(%rsp,%r13,4), %ecx  #128.49  thash[6][j]
        movl    %esi, 28(%r15)          #103.14  phash[7] = thash[7][j] (tested zero above)
        movq    %r14, %rsi              #130.7   arg2 = ptarget
        movl    %r8d, 4(%r15)           #103.14  phash[1]
        movl    %r9d, 8(%r15)           #103.14  phash[2]
        movl    %r10d, 12(%r15)         #103.14  phash[3]
        movl    %r11d, 16(%r15)         #103.14  phash[4]
        movl    %ecx, 24(%r15)          #103.14  phash[6]
        call    fulltest                #130.7
                                        # LOE r12 r13 r14 r15 eax ebx
..B1.9:                                 # Preds ..B1.8 # Infreq
        movzbl  %al, %eax               #130.7   widen bool result
        testl   %eax, %eax              #130.7
        je      ..B1.5                  # Prob 96% #130.7  false alarm: resume j-loop
                                        # LOE r12 r13 r14 r15 ebx
..B1.10:                                # Preds ..B1.9 # Infreq
# Verified hit: record nonce, publish nonce+j, and return it.
        movl    %ebx, %eax              #        eax = nonce
        movq    %r13, %rdx              #        rdx = j
        movq    16(%rbp), %r10          #        r10 = nHashesDone
        movq    1168(%rsp), %rsi        #        rsi = pdata
        movq    %rax, (%r10)            #131.5   *nHashesDone = nonce
        addl    %edx, %eax              #132.24  eax = nonce + j
        movl    %eax, 12(%rsi)          #132.5   *nNonce_p = nonce + j
        addq    $1240, %rsp             #133.33
..___tag_value_ScanHash_4WaySSE2.25:    #133.33
        popq    %rbx                    #133.33
..___tag_value_ScanHash_4WaySSE2.26:    #133.33
        popq    %r15                    #133.33
..___tag_value_ScanHash_4WaySSE2.27:    #133.33
        popq    %r14                    #133.33
..___tag_value_ScanHash_4WaySSE2.28:    #133.33
        popq    %r13                    #133.33
..___tag_value_ScanHash_4WaySSE2.29:    #133.33
        popq    %r12                    #133.33
        movq    %rbp, %rsp              #133.33
        popq    %rbp                    #133.33
..___tag_value_ScanHash_4WaySSE2.30:    #
        ret                             #133.33
        .align  16,0x90
..___tag_value_ScanHash_4WaySSE2.32:    #
                                        # LOE
# mark_end;
        .type   ScanHash_4WaySSE2,@function
        .size   ScanHash_4WaySSE2,.-ScanHash_4WaySSE2
        .data
# -- End ScanHash_4WaySSE2
.text | |
# -- Begin DoubleBlockSHA256 | |
# mark_begin; | |
.align 16,0x90 | |
DoubleBlockSHA256: | |
# parameter 1: %rdi | |
# parameter 2: %rsi | |
# parameter 3: %rdx | |
# parameter 4: %rcx | |
# parameter 5: %r8 | |
..B2.1: # Preds ..B2.0 | |
### { | |
..___tag_value_DoubleBlockSHA256.33: #148.1 | |
pushq %r12 #148.1 | |
..___tag_value_DoubleBlockSHA256.35: # | |
pushq %r13 #148.1 | |
..___tag_value_DoubleBlockSHA256.37: # | |
pushq %r14 #148.1 | |
..___tag_value_DoubleBlockSHA256.39: # | |
pushq %r15 #148.1 | |
..___tag_value_DoubleBlockSHA256.41: # | |
pushq %rbx #148.1 | |
..___tag_value_DoubleBlockSHA256.43: # | |
pushq %rbp #148.1 | |
..___tag_value_DoubleBlockSHA256.45: # | |
subq $5256, %rsp #148.1 | |
..___tag_value_DoubleBlockSHA256.47: # | |
### unsigned int* In = (unsigned int*)pin; | |
### unsigned int* Pad = (unsigned int*)pad; | |
### unsigned int* hPre = (unsigned int*)pre; | |
### unsigned int* hInit = (unsigned int*)init; | |
### unsigned int /* i, j, */ k; | |
### | |
### /* vectors used in calculation */ | |
### __m128i w0, w1, w2, w3, w4, w5, w6, w7; | |
### __m128i w8, w9, w10, w11, w12, w13, w14, w15; | |
### __m128i T1; | |
### __m128i a, b, c, d, e, f, g, h; | |
### __m128i nonce, preNonce; | |
### | |
### /* nonce offset for vector */ | |
### __m128i offset = _mm_set_epi32(0x00000003, 0x00000002, 0x00000001, 0x00000000); | |
### | |
### | |
### preNonce = _mm_add_epi32(_mm_set1_epi32(In[3]), offset); | |
### | |
### for(k = 0; k<NPAR; k+=4) { | |
### w0 = _mm_set1_epi32(In[0]); | |
### w1 = _mm_set1_epi32(In[1]); | |
### w2 = _mm_set1_epi32(In[2]); | |
### //w3 = _mm_set1_epi32(In[3]); nonce will be later hacked into the hash | |
### w4 = _mm_set1_epi32(In[4]); | |
movd 16(%rdi), %xmm6 #173.14 | |
movd 12(%rdi), %xmm7 #166.30 | |
### w5 = _mm_set1_epi32(In[5]); | |
movd 20(%rdi), %xmm11 #174.14 | |
pshufd $0, %xmm6, %xmm10 #173.14 | |
### w6 = _mm_set1_epi32(In[6]); | |
movd 24(%rdi), %xmm2 #175.14 | |
movdqa %xmm10, 112(%rsp) #173.14 | |
pshufd $0, %xmm7, %xmm1 #166.30 | |
pshufd $0, %xmm11, %xmm4 #174.14 | |
### w7 = _mm_set1_epi32(In[7]); | |
movd 28(%rdi), %xmm3 #176.14 | |
### w8 = _mm_set1_epi32(In[8]); | |
### w9 = _mm_set1_epi32(In[9]); | |
### w10 = _mm_set1_epi32(In[10]); | |
### w11 = _mm_set1_epi32(In[11]); | |
movd 44(%rdi), %xmm10 #180.15 | |
movdqa %xmm4, 128(%rsp) #174.14 | |
### w12 = _mm_set1_epi32(In[12]); | |
movd 48(%rdi), %xmm4 #181.15 | |
pshufd $0, %xmm2, %xmm9 #175.14 | |
movd 32(%rdi), %xmm0 #177.14 | |
paddd .L_2il0floatpacket.6744(%rip), %xmm1 #166.16 | |
movdqa %xmm9, 144(%rsp) #175.14 | |
pshufd $0, %xmm3, %xmm15 #176.14 | |
pshufd $0, %xmm10, %xmm11 #180.15 | |
movd 4(%rdi), %xmm5 #170.14 | |
### w13 = _mm_set1_epi32(In[13]); | |
movd 52(%rdi), %xmm9 #182.15 | |
movdqa %xmm15, 160(%rsp) #176.14 | |
movdqa %xmm11, 224(%rsp) #180.15 | |
movd (%rdi), %xmm14 #169.14 | |
pshufd $0, %xmm4, %xmm2 #181.15 | |
### w14 = _mm_set1_epi32(In[14]); | |
### w15 = _mm_set1_epi32(In[15]); | |
### | |
### /* hack nonce into lowest byte of w3 */ | |
### nonce = _mm_add_epi32(preNonce, _mm_set1_epi32(k)); | |
### w3 = nonce; | |
### | |
### a = _mm_set1_epi32(hPre[0]); | |
### b = _mm_set1_epi32(hPre[1]); | |
movd 4(%rdx), %xmm11 #191.13 | |
### c = _mm_set1_epi32(hPre[2]); | |
movd 8(%rdx), %xmm4 #192.13 | |
### d = _mm_set1_epi32(hPre[3]); | |
### e = _mm_set1_epi32(hPre[4]); | |
movd 16(%rdx), %xmm15 #194.13 | |
movdqa %xmm1, 80(%rsp) #166.16 | |
pshufd $0, %xmm0, %xmm1 #177.14 | |
pshufd $0, %xmm5, %xmm13 #170.14 | |
movdqa %xmm1, 176(%rsp) #177.14 | |
movd 40(%rdi), %xmm5 #179.15 | |
pshufd $0, %xmm9, %xmm3 #182.15 | |
movd 8(%rdi), %xmm7 #171.14 | |
movd 56(%rdi), %xmm1 #183.15 | |
pshufd $0, %xmm14, %xmm12 #169.14 | |
movd 36(%rdi), %xmm14 #178.14 | |
movdqa %xmm3, 256(%rsp) #182.15 | |
pshufd $0, %xmm11, %xmm3 #191.13 | |
pshufd $0, %xmm4, %xmm11 #192.13 | |
pshufd $0, %xmm15, %xmm4 #194.13 | |
### f = _mm_set1_epi32(hPre[5]); | |
movd 20(%rdx), %xmm0 #195.13 | |
### g = _mm_set1_epi32(hPre[6]); | |
### h = _mm_set1_epi32(hPre[7]); | |
### | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0); | |
movdqa %xmm4, %xmm15 #199.9 | |
pshufd $0, %xmm5, %xmm6 #179.15 | |
psrld $6, %xmm15 #199.9 | |
pshufd $0, %xmm7, %xmm8 #171.14 | |
pshufd $0, %xmm14, %xmm7 #178.14 | |
movdqa %xmm6, 208(%rsp) #179.15 | |
pshufd $0, %xmm1, %xmm6 #183.15 | |
movd 60(%rdi), %xmm14 #184.15 | |
pshufd $0, %xmm0, %xmm1 #195.13 | |
movdqa %xmm4, %xmm0 #199.9 | |
pshufd $0, %xmm14, %xmm5 #184.15 | |
pslld $26, %xmm0 #199.9 | |
movd (%rdx), %xmm10 #190.13 | |
por %xmm0, %xmm15 #199.9 | |
movd 24(%rdx), %xmm14 #196.13 | |
movdqa %xmm4, %xmm0 #199.9 | |
movdqa %xmm5, 288(%rsp) #184.15 | |
pslld $21, %xmm0 #199.9 | |
pshufd $0, %xmm10, %xmm5 #190.13 | |
pshufd $0, %xmm14, %xmm10 #196.13 | |
movdqa %xmm4, %xmm14 #199.9 | |
psrld $11, %xmm14 #199.9 | |
por %xmm0, %xmm14 #199.9 | |
movdqa %xmm4, %xmm0 #199.9 | |
pxor %xmm14, %xmm15 #199.9 | |
movdqa %xmm4, %xmm14 #199.9 | |
psrld $25, %xmm0 #199.9 | |
pslld $7, %xmm14 #199.9 | |
movdqa %xmm2, 240(%rsp) #181.15 | |
por %xmm14, %xmm0 #199.9 | |
movd 12(%rdx), %xmm2 #193.13 | |
pxor %xmm0, %xmm15 #199.9 | |
pshufd $0, %xmm2, %xmm9 #193.13 | |
movdqa %xmm4, %xmm14 #199.9 | |
movd 28(%rdx), %xmm2 #197.13 | |
movdqa %xmm4, %xmm0 #199.9 | |
pshufd $0, %xmm2, %xmm2 #197.13 | |
pand %xmm1, %xmm14 #199.9 | |
pandn %xmm10, %xmm0 #199.9 | |
movdqa %xmm2, 416(%rsp) #197.13 | |
paddd %xmm15, %xmm2 #199.9 | |
movdqa .L_2il0floatpacket.6745(%rip), %xmm15 #199.9 | |
pxor %xmm0, %xmm14 #199.9 | |
paddd %xmm14, %xmm15 #199.9 | |
movdqa %xmm5, %xmm14 #199.9 | |
paddd %xmm15, %xmm2 #199.9 | |
pslld $30, %xmm14 #199.9 | |
movdqa %xmm12, 16(%rsp) #169.14 | |
paddd %xmm12, %xmm2 #199.9 | |
movdqa %xmm5, %xmm12 #199.9 | |
movdqa %xmm5, %xmm15 #199.9 | |
psrld $2, %xmm12 #199.9 | |
pslld $19, %xmm15 #199.9 | |
por %xmm14, %xmm12 #199.9 | |
movdqa %xmm5, %xmm14 #199.9 | |
psrld $13, %xmm14 #199.9 | |
movdqa %xmm5, %xmm0 #199.9 | |
por %xmm15, %xmm14 #199.9 | |
movdqa %xmm5, %xmm15 #199.9 | |
pxor %xmm14, %xmm12 #199.9 | |
movdqa %xmm5, %xmm14 #199.9 | |
psrld $22, %xmm15 #199.9 | |
pslld $10, %xmm14 #199.9 | |
pand %xmm3, %xmm0 #199.9 | |
por %xmm14, %xmm15 #199.9 | |
movdqa %xmm5, %xmm14 #199.9 | |
pxor %xmm15, %xmm12 #199.9 | |
pand %xmm11, %xmm14 #199.9 | |
movdqa %xmm0, %xmm15 #199.9 | |
pxor %xmm14, %xmm15 #199.9 | |
movdqa %xmm3, %xmm14 #199.9 | |
pand %xmm11, %xmm14 #199.9 | |
movdqa %xmm9, 352(%rsp) #193.13 | |
paddd %xmm2, %xmm9 #199.9 | |
pxor %xmm14, %xmm15 #199.9 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1); | |
movdqa %xmm9, %xmm14 #200.9 | |
paddd %xmm15, %xmm12 #199.9 | |
movdqa %xmm9, %xmm15 #200.9 | |
psrld $6, %xmm15 #200.9 | |
pslld $26, %xmm14 #200.9 | |
paddd %xmm12, %xmm2 #199.9 | |
por %xmm14, %xmm15 #200.9 | |
movdqa %xmm9, %xmm14 #200.9 | |
movdqa %xmm9, %xmm12 #200.9 | |
psrld $11, %xmm14 #200.9 | |
pslld $21, %xmm12 #200.9 | |
por %xmm12, %xmm14 #200.9 | |
movdqa %xmm9, %xmm12 #200.9 | |
pxor %xmm14, %xmm15 #200.9 | |
movdqa %xmm9, %xmm14 #200.9 | |
psrld $25, %xmm12 #200.9 | |
pslld $7, %xmm14 #200.9 | |
por %xmm14, %xmm12 #200.9 | |
movdqa %xmm9, %xmm14 #200.9 | |
pxor %xmm12, %xmm15 #200.9 | |
movdqa %xmm9, %xmm12 #200.9 | |
pand %xmm4, %xmm14 #200.9 | |
pandn %xmm1, %xmm12 #200.9 | |
movdqa %xmm10, 400(%rsp) #196.13 | |
paddd %xmm15, %xmm10 #200.9 | |
movdqa .L_2il0floatpacket.6746(%rip), %xmm15 #200.9 | |
pxor %xmm12, %xmm14 #200.9 | |
paddd %xmm14, %xmm15 #200.9 | |
movdqa %xmm2, %xmm14 #200.9 | |
paddd %xmm15, %xmm10 #200.9 | |
movdqa %xmm2, %xmm15 #200.9 | |
movdqa %xmm13, 32(%rsp) #170.14 | |
paddd %xmm13, %xmm10 #200.9 | |
movdqa %xmm2, %xmm13 #200.9 | |
psrld $2, %xmm15 #200.9 | |
pslld $30, %xmm13 #200.9 | |
pslld $19, %xmm14 #200.9 | |
por %xmm13, %xmm15 #200.9 | |
movdqa %xmm2, %xmm13 #200.9 | |
psrld $13, %xmm13 #200.9 | |
movdqa %xmm2, %xmm12 #200.9 | |
por %xmm14, %xmm13 #200.9 | |
movdqa %xmm2, %xmm14 #200.9 | |
pxor %xmm13, %xmm15 #200.9 | |
movdqa %xmm2, %xmm13 #200.9 | |
psrld $22, %xmm14 #200.9 | |
pslld $10, %xmm13 #200.9 | |
pand %xmm5, %xmm12 #200.9 | |
por %xmm13, %xmm14 #200.9 | |
movdqa %xmm2, %xmm13 #200.9 | |
pxor %xmm14, %xmm15 #200.9 | |
pand %xmm3, %xmm13 #200.9 | |
movdqa %xmm12, %xmm14 #200.9 | |
pxor %xmm13, %xmm14 #200.9 | |
pxor %xmm0, %xmm14 #200.9 | |
movdqa %xmm11, 336(%rsp) #192.13 | |
paddd %xmm10, %xmm11 #200.9 | |
paddd %xmm14, %xmm15 #200.9 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2); | |
movdqa %xmm11, %xmm14 #201.9 | |
paddd %xmm15, %xmm10 #200.9 | |
movdqa %xmm11, %xmm0 #201.9 | |
movdqa %xmm11, %xmm13 #201.9 | |
movdqa %xmm11, %xmm15 #201.9 | |
psrld $6, %xmm14 #201.9 | |
pslld $26, %xmm0 #201.9 | |
psrld $11, %xmm13 #201.9 | |
pslld $21, %xmm15 #201.9 | |
por %xmm0, %xmm14 #201.9 | |
por %xmm15, %xmm13 #201.9 | |
pxor %xmm13, %xmm14 #201.9 | |
movdqa %xmm11, %xmm0 #201.9 | |
movdqa %xmm11, %xmm13 #201.9 | |
psrld $25, %xmm0 #201.9 | |
pslld $7, %xmm13 #201.9 | |
movdqa %xmm11, %xmm15 #201.9 | |
por %xmm13, %xmm0 #201.9 | |
movdqa %xmm11, %xmm13 #201.9 | |
pand %xmm9, %xmm13 #201.9 | |
pandn %xmm4, %xmm15 #201.9 | |
pxor %xmm0, %xmm14 #201.9 | |
pxor %xmm15, %xmm13 #201.9 | |
movdqa .L_2il0floatpacket.6747(%rip), %xmm0 #201.9 | |
movdqa %xmm10, %xmm15 #201.9 | |
paddd %xmm13, %xmm0 #201.9 | |
movdqa %xmm10, %xmm13 #201.9 | |
movdqa %xmm1, 384(%rsp) #195.13 | |
paddd %xmm14, %xmm1 #201.9 | |
psrld $2, %xmm13 #201.9 | |
pslld $30, %xmm15 #201.9 | |
paddd %xmm0, %xmm1 #201.9 | |
por %xmm15, %xmm13 #201.9 | |
movdqa %xmm10, %xmm0 #201.9 | |
movdqa %xmm10, %xmm15 #201.9 | |
psrld $13, %xmm0 #201.9 | |
pslld $19, %xmm15 #201.9 | |
por %xmm15, %xmm0 #201.9 | |
movdqa %xmm10, %xmm15 #201.9 | |
pxor %xmm0, %xmm13 #201.9 | |
movdqa %xmm10, %xmm0 #201.9 | |
movdqa %xmm10, %xmm14 #201.9 | |
psrld $22, %xmm0 #201.9 | |
pslld $10, %xmm15 #201.9 | |
pand %xmm2, %xmm14 #201.9 | |
por %xmm15, %xmm0 #201.9 | |
movdqa %xmm10, %xmm15 #201.9 | |
movdqa %xmm5, 304(%rsp) #190.13 | |
pand %xmm5, %xmm15 #201.9 | |
movdqa %xmm14, %xmm5 #201.9 | |
paddd %xmm8, %xmm1 #201.9 | |
pxor %xmm15, %xmm5 #201.9 | |
pxor %xmm0, %xmm13 #201.9 | |
pxor %xmm12, %xmm5 #201.9 | |
movdqa %xmm3, 320(%rsp) #191.13 | |
paddd %xmm1, %xmm3 #201.9 | |
paddd %xmm5, %xmm13 #201.9 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3); | |
movdqa %xmm3, %xmm12 #202.9 | |
paddd %xmm13, %xmm1 #201.9 | |
movdqa %xmm3, %xmm5 #202.9 | |
movdqa %xmm3, %xmm0 #202.9 | |
movdqa %xmm3, %xmm13 #202.9 | |
psrld $6, %xmm12 #202.9 | |
pslld $26, %xmm5 #202.9 | |
psrld $11, %xmm0 #202.9 | |
pslld $21, %xmm13 #202.9 | |
por %xmm5, %xmm12 #202.9 | |
por %xmm13, %xmm0 #202.9 | |
movdqa %xmm3, %xmm15 #202.9 | |
movdqa %xmm3, %xmm5 #202.9 | |
pxor %xmm0, %xmm12 #202.9 | |
psrld $25, %xmm15 #202.9 | |
pslld $7, %xmm5 #202.9 | |
movdqa %xmm3, %xmm0 #202.9 | |
movdqa %xmm3, 496(%rsp) #201.9 | |
por %xmm5, %xmm15 #202.9 | |
pand %xmm11, %xmm0 #202.9 | |
pandn %xmm9, %xmm3 #202.9 | |
movdqa %xmm11, 464(%rsp) #200.9 | |
pxor %xmm15, %xmm12 #202.9 | |
movdqa .L_2il0floatpacket.6748(%rip), %xmm11 #202.9 | |
pxor %xmm3, %xmm0 #202.9 | |
movdqa %xmm4, 368(%rsp) #194.13 | |
paddd %xmm12, %xmm4 #202.9 | |
paddd %xmm0, %xmm11 #202.9 | |
movdqa %xmm1, %xmm5 #202.9 | |
paddd %xmm11, %xmm4 #202.9 | |
movdqa %xmm1, %xmm11 #202.9 | |
movdqa %xmm9, 432(%rsp) #199.9 | |
pand %xmm10, %xmm11 #202.9 | |
movdqa %xmm10, 480(%rsp) #200.9 | |
movdqa %xmm1, %xmm10 #202.9 | |
movdqa %xmm4, 528(%rsp) #202.9 | |
movdqa %xmm1, %xmm9 #202.9 | |
movdqa %xmm1, %xmm4 #202.9 | |
psrld $2, %xmm10 #202.9 | |
pslld $30, %xmm9 #202.9 | |
psrld $13, %xmm5 #202.9 | |
pslld $19, %xmm4 #202.9 | |
movdqa %xmm1, %xmm0 #202.9 | |
movdqa %xmm1, %xmm3 #202.9 | |
por %xmm9, %xmm10 #202.9 | |
movdqa %xmm1, 512(%rsp) #201.9 | |
por %xmm4, %xmm5 #202.9 | |
psrld $22, %xmm0 #202.9 | |
pslld $10, %xmm3 #202.9 | |
pand %xmm2, %xmm1 #202.9 | |
pxor %xmm5, %xmm10 #202.9 | |
movdqa %xmm11, 544(%rsp) #202.9 | |
por %xmm3, %xmm0 #202.9 | |
pxor %xmm1, %xmm11 #202.9 | |
pxor %xmm0, %xmm10 #202.9 | |
pxor %xmm14, %xmm11 #202.9 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15); | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
movdqa %xmm6, %xmm1 #216.14 | |
paddd %xmm11, %xmm10 #202.9 | |
movdqa %xmm6, %xmm14 #216.14 | |
movdqa %xmm2, 448(%rsp) #199.9 | |
movdqa %xmm6, %xmm2 #216.14 | |
movdqa %xmm10, 560(%rsp) #202.9 | |
movdqa %xmm6, %xmm10 #216.14 | |
psrld $17, %xmm10 #216.14 | |
pslld $15, %xmm2 #216.14 | |
psrld $19, %xmm1 #216.14 | |
pslld $13, %xmm14 #216.14 | |
movdqa 32(%rsp), %xmm5 #216.14 | |
por %xmm2, %xmm10 #216.14 | |
por %xmm14, %xmm1 #216.14 | |
movdqa %xmm5, %xmm14 #216.14 | |
pxor %xmm1, %xmm10 #216.14 | |
movdqa %xmm5, %xmm1 #216.14 | |
movdqa %xmm5, %xmm11 #216.14 | |
movdqa %xmm5, %xmm4 #216.14 | |
psrld $7, %xmm1 #216.14 | |
pslld $25, %xmm14 #216.14 | |
psrld $18, %xmm11 #216.14 | |
pslld $14, %xmm4 #216.14 | |
por %xmm14, %xmm1 #216.14 | |
por %xmm4, %xmm11 #216.14 | |
movdqa %xmm5, %xmm0 #216.14 | |
pxor %xmm11, %xmm1 #216.14 | |
movdqa %xmm6, 272(%rsp) #183.15 | |
psrld $10, %xmm6 #216.14 | |
psrld $3, %xmm0 #216.14 | |
pxor %xmm6, %xmm10 #216.14 | |
pxor %xmm0, %xmm1 #216.14 | |
paddd %xmm7, %xmm10 #216.14 | |
paddd 16(%rsp), %xmm1 #216.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 16, w0); | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
movdqa %xmm8, %xmm12 #218.14 | |
movdqa 288(%rsp), %xmm6 #218.14 | |
paddd %xmm1, %xmm10 #216.14 | |
movdqa %xmm6, %xmm1 #218.14 | |
movdqa %xmm6, %xmm2 #218.14 | |
movdqa %xmm6, %xmm3 #218.14 | |
movdqa %xmm6, %xmm9 #218.14 | |
psrld $17, %xmm1 #218.14 | |
pslld $15, %xmm2 #218.14 | |
psrld $19, %xmm3 #218.14 | |
pslld $13, %xmm9 #218.14 | |
por %xmm2, %xmm1 #218.14 | |
por %xmm9, %xmm3 #218.14 | |
pxor %xmm3, %xmm1 #218.14 | |
psrld $10, %xmm6 #218.14 | |
pxor %xmm6, %xmm1 #218.14 | |
movdqa %xmm8, %xmm13 #218.14 | |
movdqa %xmm8, %xmm15 #218.14 | |
movdqa %xmm8, %xmm6 #218.14 | |
psrld $7, %xmm12 #218.14 | |
pslld $25, %xmm13 #218.14 | |
psrld $18, %xmm15 #218.14 | |
pslld $14, %xmm6 #218.14 | |
por %xmm13, %xmm12 #218.14 | |
por %xmm6, %xmm15 #218.14 | |
movdqa %xmm8, 96(%rsp) #171.14 | |
pxor %xmm15, %xmm12 #218.14 | |
psrld $3, %xmm8 #218.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 17, w1); | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
movdqa %xmm10, %xmm6 #220.14 | |
movdqa 208(%rsp), %xmm14 #218.14 | |
pxor %xmm8, %xmm12 #218.14 | |
paddd %xmm14, %xmm1 #218.14 | |
paddd %xmm5, %xmm12 #218.14 | |
paddd %xmm12, %xmm1 #218.14 | |
movdqa %xmm10, %xmm5 #220.14 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 18, w2); | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
movdqa %xmm1, %xmm0 #222.14 | |
movdqa %xmm1, %xmm2 #222.14 | |
movdqa %xmm1, %xmm3 #222.14 | |
movdqa %xmm1, %xmm9 #222.14 | |
movdqa %xmm10, %xmm11 #220.14 | |
movdqa %xmm10, %xmm4 #220.14 | |
psrld $17, %xmm0 #222.14 | |
pslld $15, %xmm2 #222.14 | |
psrld $19, %xmm3 #222.14 | |
pslld $13, %xmm9 #222.14 | |
psrld $17, %xmm6 #220.14 | |
pslld $15, %xmm5 #220.14 | |
psrld $19, %xmm11 #220.14 | |
pslld $13, %xmm4 #220.14 | |
por %xmm2, %xmm0 #222.14 | |
por %xmm9, %xmm3 #222.14 | |
por %xmm5, %xmm6 #220.14 | |
por %xmm4, %xmm11 #220.14 | |
movdqa %xmm1, 592(%rsp) #218.14 | |
pxor %xmm3, %xmm0 #222.14 | |
psrld $10, %xmm1 #222.14 | |
pxor %xmm11, %xmm6 #220.14 | |
movdqa %xmm10, 576(%rsp) #216.14 | |
psrld $10, %xmm10 #220.14 | |
pxor %xmm1, %xmm0 #222.14 | |
pxor %xmm10, %xmm6 #220.14 | |
movdqa 240(%rsp), %xmm1 #222.14 | |
movdqa 224(%rsp), %xmm5 #220.14 | |
paddd %xmm1, %xmm0 #222.14 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 19, w3); | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
movdqa 128(%rsp), %xmm11 #224.14 | |
paddd %xmm5, %xmm6 #220.14 | |
movdqa 112(%rsp), %xmm4 #222.14 | |
movdqa %xmm11, %xmm2 #224.14 | |
movdqa %xmm0, 624(%rsp) #222.14 | |
movdqa %xmm11, %xmm0 #224.14 | |
movdqa %xmm11, %xmm3 #224.14 | |
movdqa %xmm11, %xmm9 #224.14 | |
movdqa %xmm6, 608(%rsp) #220.14 | |
movdqa %xmm4, %xmm13 #222.14 | |
movdqa %xmm4, %xmm15 #222.14 | |
movdqa %xmm4, %xmm8 #222.14 | |
movdqa %xmm4, %xmm6 #222.14 | |
psrld $7, %xmm0 #224.14 | |
pslld $25, %xmm2 #224.14 | |
psrld $18, %xmm3 #224.14 | |
pslld $14, %xmm9 #224.14 | |
psrld $7, %xmm13 #222.14 | |
pslld $25, %xmm15 #222.14 | |
psrld $18, %xmm8 #222.14 | |
pslld $14, %xmm6 #222.14 | |
por %xmm2, %xmm0 #224.14 | |
por %xmm9, %xmm3 #224.14 | |
movdqa %xmm11, %xmm12 #224.14 | |
por %xmm15, %xmm13 #222.14 | |
por %xmm6, %xmm8 #222.14 | |
movdqa %xmm4, %xmm10 #222.14 | |
pxor %xmm3, %xmm0 #224.14 | |
psrld $3, %xmm12 #224.14 | |
pxor %xmm8, %xmm13 #222.14 | |
psrld $3, %xmm10 #222.14 | |
pxor %xmm12, %xmm0 #224.14 | |
pxor %xmm10, %xmm13 #222.14 | |
paddd %xmm4, %xmm0 #224.14 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 20, w4); | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
movdqa 144(%rsp), %xmm10 #226.14 | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 21, w5); | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 22, w6); | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 23, w7); | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
movdqa %xmm7, %xmm15 #232.14 | |
movdqa %xmm0, 656(%rsp) #224.14 | |
movdqa %xmm10, %xmm4 #226.14 | |
movdqa %xmm10, %xmm8 #226.14 | |
movdqa %xmm10, %xmm6 #226.14 | |
movdqa %xmm10, %xmm0 #226.14 | |
psrld $7, %xmm4 #226.14 | |
pslld $25, %xmm8 #226.14 | |
psrld $18, %xmm6 #226.14 | |
pslld $14, %xmm0 #226.14 | |
por %xmm8, %xmm4 #226.14 | |
por %xmm0, %xmm6 #226.14 | |
movdqa %xmm10, %xmm2 #226.14 | |
pxor %xmm6, %xmm4 #226.14 | |
psrld $3, %xmm2 #226.14 | |
movdqa 160(%rsp), %xmm6 #228.14 | |
pxor %xmm2, %xmm4 #226.14 | |
paddd %xmm11, %xmm4 #226.14 | |
movdqa %xmm6, %xmm11 #228.14 | |
movdqa %xmm6, %xmm8 #228.14 | |
movdqa %xmm6, %xmm0 #228.14 | |
movdqa %xmm6, %xmm2 #228.14 | |
psrld $7, %xmm11 #228.14 | |
pslld $25, %xmm8 #228.14 | |
psrld $18, %xmm0 #228.14 | |
pslld $14, %xmm2 #228.14 | |
por %xmm8, %xmm11 #228.14 | |
por %xmm2, %xmm0 #228.14 | |
movdqa %xmm6, %xmm3 #228.14 | |
pxor %xmm0, %xmm11 #228.14 | |
psrld $3, %xmm3 #228.14 | |
movdqa 176(%rsp), %xmm8 #230.14 | |
pxor %xmm3, %xmm11 #228.14 | |
movdqa %xmm4, 672(%rsp) #226.14 | |
paddd %xmm10, %xmm11 #228.14 | |
movdqa %xmm8, %xmm4 #230.14 | |
movdqa %xmm8, %xmm9 #230.14 | |
movdqa %xmm8, %xmm10 #230.14 | |
movdqa %xmm8, %xmm12 #230.14 | |
psrld $7, %xmm4 #230.14 | |
pslld $25, %xmm9 #230.14 | |
psrld $18, %xmm10 #230.14 | |
pslld $14, %xmm12 #230.14 | |
movdqa %xmm13, 640(%rsp) #222.14 | |
por %xmm9, %xmm4 #230.14 | |
por %xmm12, %xmm10 #230.14 | |
movdqa %xmm8, %xmm13 #230.14 | |
pxor %xmm10, %xmm4 #230.14 | |
psrld $3, %xmm13 #230.14 | |
pxor %xmm13, %xmm4 #230.14 | |
movdqa %xmm7, %xmm0 #232.14 | |
paddd %xmm6, %xmm4 #230.14 | |
movdqa %xmm7, %xmm6 #232.14 | |
movdqa %xmm7, %xmm2 #232.14 | |
psrld $7, %xmm6 #232.14 | |
pslld $25, %xmm15 #232.14 | |
psrld $18, %xmm0 #232.14 | |
pslld $14, %xmm2 #232.14 | |
por %xmm15, %xmm6 #232.14 | |
por %xmm2, %xmm0 #232.14 | |
movdqa %xmm7, %xmm3 #232.14 | |
pxor %xmm0, %xmm6 #232.14 | |
psrld $3, %xmm3 #232.14 | |
pxor %xmm3, %xmm6 #232.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 24, w8); | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
movdqa %xmm14, %xmm9 #234.14 | |
movdqa %xmm4, 704(%rsp) #230.14 | |
paddd %xmm8, %xmm6 #232.14 | |
movdqa %xmm14, %xmm4 #234.14 | |
movdqa %xmm14, %xmm8 #234.14 | |
movdqa %xmm14, %xmm10 #234.14 | |
psrld $7, %xmm4 #234.14 | |
pslld $25, %xmm8 #234.14 | |
psrld $18, %xmm9 #234.14 | |
pslld $14, %xmm10 #234.14 | |
por %xmm8, %xmm4 #234.14 | |
movdqa %xmm11, 688(%rsp) #228.14 | |
por %xmm10, %xmm9 #234.14 | |
movdqa %xmm14, %xmm11 #234.14 | |
pxor %xmm9, %xmm4 #234.14 | |
psrld $3, %xmm11 #234.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 25, w9); | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
movdqa %xmm5, %xmm0 #236.15 | |
pxor %xmm11, %xmm4 #234.14 | |
movdqa %xmm5, %xmm2 #236.15 | |
movdqa %xmm7, 192(%rsp) #178.14 | |
paddd %xmm7, %xmm4 #234.14 | |
movdqa %xmm5, %xmm7 #236.15 | |
movdqa %xmm5, %xmm3 #236.15 | |
psrld $7, %xmm7 #236.15 | |
pslld $25, %xmm0 #236.15 | |
psrld $18, %xmm2 #236.15 | |
pslld $14, %xmm3 #236.15 | |
por %xmm0, %xmm7 #236.15 | |
por %xmm3, %xmm2 #236.15 | |
pxor %xmm2, %xmm7 #236.15 | |
psrld $3, %xmm5 #236.15 | |
pxor %xmm5, %xmm7 #236.15 | |
paddd %xmm14, %xmm7 #236.15 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 26, w10); | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
movdqa %xmm1, %xmm14 #238.15 | |
psrld $7, %xmm14 #238.15 | |
pslld $25, %xmm1 #238.15 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 27, w11); | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 28, w12); | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 29, w13); | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 30, w14); | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 31, w15); | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 32, w0); | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 33, w1); | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 34, w2); | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 35, w3); | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 36, w4); | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 37, w5); | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 38, w6); | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 39, w7); | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 40, w8); | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 41, w9); | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 42, w10); | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 43, w11); | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 44, w12); | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 45, w13); | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 46, w14); | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 47, w15); | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 48, w0); | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 49, w1); | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 50, w2); | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 51, w3); | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 52, w4); | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 53, w5); | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 54, w6); | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 55, w7); | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 56, w8); | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 57, w9); | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 58, w10); | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 59, w11); | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 60, w12); | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 61, w13); | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 62, w14); | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 63, w15); | |
### | |
### #define store_load(x, i, dest) \ | |
### T1 = _mm_set1_epi32((hPre)[i]); \ | |
### dest = _mm_add_epi32(T1, x); | |
### | |
### store_load(a, 0, w0); | |
### store_load(b, 1, w1); | |
### store_load(c, 2, w2); | |
### store_load(d, 3, w3); | |
### store_load(e, 4, w4); | |
### store_load(f, 5, w5); | |
### store_load(g, 6, w6); | |
### store_load(h, 7, w7); | |
### | |
### w8 = _mm_set1_epi32(Pad[8]); | |
### w9 = _mm_set1_epi32(Pad[9]); | |
### w10 = _mm_set1_epi32(Pad[10]); | |
### w11 = _mm_set1_epi32(Pad[11]); | |
### w12 = _mm_set1_epi32(Pad[12]); | |
### w13 = _mm_set1_epi32(Pad[13]); | |
### w14 = _mm_set1_epi32(Pad[14]); | |
### w15 = _mm_set1_epi32(Pad[15]); | |
### | |
### a = _mm_set1_epi32(hInit[0]); | |
movl (%r8), %eax #337.28 | |
por %xmm1, %xmm14 #238.15 | |
movq %r8, 48(%rsp) #148.1 | |
movq %rcx, 5232(%rsp) #148.1 | |
movq %rsi, 56(%rsp) #148.1 | |
movq $0, 5240(%rsp) #168.9 | |
movl %eax, 64(%rsp) #337.28 | |
movl $0, 5248(%rsp) #187.34 | |
movdqa %xmm6, 720(%rsp) #232.14 | |
movdqa %xmm4, 736(%rsp) #234.14 | |
movdqa %xmm7, 752(%rsp) #236.15 | |
movdqa %xmm14, (%rsp) #238.15 | |
# LOE | |
..B2.6: # Preds ..B2.1 | |
movdqa 240(%rsp), %xmm10 #238.15 | |
movdqa 256(%rsp), %xmm11 #240.15 | |
movdqa %xmm10, %xmm8 #238.15 | |
movdqa %xmm10, %xmm7 #238.15 | |
movdqa %xmm11, %xmm13 #240.15 | |
movdqa %xmm11, %xmm14 #240.15 | |
movdqa %xmm11, %xmm15 #240.15 | |
movdqa %xmm11, %xmm9 #240.15 | |
psrld $18, %xmm8 #238.15 | |
pslld $14, %xmm7 #238.15 | |
psrld $7, %xmm13 #240.15 | |
pslld $25, %xmm14 #240.15 | |
psrld $18, %xmm15 #240.15 | |
pslld $14, %xmm9 #240.15 | |
por %xmm7, %xmm8 #238.15 | |
movdqa (%rsp), %xmm6 #238.15 | |
movdqa %xmm10, %xmm5 #238.15 | |
por %xmm14, %xmm13 #240.15 | |
por %xmm9, %xmm15 #240.15 | |
movdqa %xmm11, %xmm12 #240.15 | |
pxor %xmm8, %xmm6 #238.15 | |
psrld $3, %xmm5 #238.15 | |
pxor %xmm15, %xmm13 #240.15 | |
psrld $3, %xmm12 #240.15 | |
pxor %xmm5, %xmm6 #238.15 | |
movdqa 224(%rsp), %xmm4 #238.15 | |
pxor %xmm12, %xmm13 #240.15 | |
movdqa 272(%rsp), %xmm12 #242.15 | |
paddd %xmm6, %xmm4 #238.15 | |
movdqa %xmm12, %xmm8 #242.15 | |
movdqa %xmm12, %xmm7 #242.15 | |
movdqa %xmm12, %xmm3 #242.15 | |
movdqa %xmm12, %xmm6 #242.15 | |
psrld $7, %xmm8 #242.15 | |
pslld $25, %xmm7 #242.15 | |
psrld $18, %xmm3 #242.15 | |
pslld $14, %xmm6 #242.15 | |
movdqa 288(%rsp), %xmm14 #244.15 | |
por %xmm7, %xmm8 #242.15 | |
por %xmm6, %xmm3 #242.15 | |
movdqa %xmm12, %xmm2 #242.15 | |
paddd %xmm13, %xmm10 #240.15 | |
movdqa %xmm14, %xmm1 #244.15 | |
movdqa %xmm14, %xmm13 #244.15 | |
pxor %xmm3, %xmm8 #242.15 | |
psrld $3, %xmm2 #242.15 | |
psrld $18, %xmm1 #244.15 | |
pslld $14, %xmm13 #244.15 | |
pxor %xmm2, %xmm8 #242.15 | |
por %xmm13, %xmm1 #244.15 | |
paddd %xmm8, %xmm11 #242.15 | |
movdqa 576(%rsp), %xmm13 #246.15 | |
movdqa %xmm14, %xmm5 #244.15 | |
movdqa %xmm10, 32(%rsp) #240.15 | |
movdqa %xmm13, %xmm9 #246.15 | |
movdqa %xmm11, 768(%rsp) #242.15 | |
movdqa %xmm13, %xmm11 #246.15 | |
movdqa %xmm13, %xmm10 #246.15 | |
movdqa %xmm13, %xmm8 #246.15 | |
psrld $7, %xmm9 #246.15 | |
pslld $25, %xmm11 #246.15 | |
psrld $18, %xmm10 #246.15 | |
pslld $14, %xmm8 #246.15 | |
por %xmm11, %xmm9 #246.15 | |
por %xmm8, %xmm10 #246.15 | |
movdqa %xmm13, %xmm7 #246.15 | |
pxor %xmm10, %xmm9 #246.15 | |
psrld $3, %xmm7 #246.15 | |
movdqa %xmm14, %xmm0 #244.15 | |
pxor %xmm7, %xmm9 #246.15 | |
movdqa %xmm14, %xmm15 #244.15 | |
paddd %xmm9, %xmm14 #246.15 | |
psrld $7, %xmm5 #244.15 | |
movdqa %xmm14, 800(%rsp) #246.15 | |
pslld $25, %xmm0 #244.15 | |
movdqa 592(%rsp), %xmm14 #249.14 | |
por %xmm0, %xmm5 #244.15 | |
movdqa %xmm4, 16(%rsp) #238.15 | |
movdqa %xmm14, %xmm3 #249.14 | |
movdqa %xmm14, %xmm6 #249.14 | |
movdqa %xmm14, %xmm2 #249.14 | |
movdqa %xmm14, %xmm4 #249.14 | |
pxor %xmm1, %xmm5 #244.15 | |
movq 56(%rsp), %rax #328.14 | |
psrld $3, %xmm15 #244.15 | |
psrld $7, %xmm3 #249.14 | |
pslld $25, %xmm6 #249.14 | |
psrld $18, %xmm2 #249.14 | |
pslld $14, %xmm4 #249.14 | |
### b = _mm_set1_epi32(hInit[1]); | |
movq 48(%rsp), %rdx #338.13 | |
pxor %xmm15, %xmm5 #244.15 | |
por %xmm6, %xmm3 #249.14 | |
por %xmm4, %xmm2 #249.14 | |
paddd %xmm5, %xmm12 #244.15 | |
pxor %xmm2, %xmm3 #249.14 | |
movd 52(%rax), %xmm5 #333.15 | |
psrld $3, %xmm14 #249.14 | |
pshufd $0, %xmm5, %xmm4 #333.15 | |
pxor %xmm14, %xmm3 #249.14 | |
movdqa %xmm4, 912(%rsp) #333.15 | |
paddd %xmm3, %xmm13 #249.14 | |
### c = _mm_set1_epi32(hInit[2]); | |
### d = _mm_set1_epi32(hInit[3]); | |
movd 12(%rdx), %xmm2 #340.13 | |
### e = _mm_set1_epi32(hInit[4]); | |
movd 16(%rdx), %xmm4 #341.13 | |
movd 56(%rax), %xmm0 #334.15 | |
pshufd $0, %xmm2, %xmm5 #340.13 | |
movd 32(%rax), %xmm15 #328.14 | |
movd 60(%rax), %xmm1 #335.15 | |
movdqa %xmm13, 816(%rsp) #249.14 | |
pshufd $0, %xmm0, %xmm13 #334.15 | |
movdqa %xmm5, 1008(%rsp) #340.13 | |
pshufd $0, %xmm4, %xmm5 #341.13 | |
### f = _mm_set1_epi32(hInit[5]); | |
movd 20(%rdx), %xmm0 #342.13 | |
movdqa %xmm12, 784(%rsp) #244.15 | |
movd 48(%rax), %xmm7 #332.15 | |
movd 36(%rax), %xmm9 #329.14 | |
movd 40(%rax), %xmm12 #330.15 | |
movd 44(%rax), %xmm8 #331.15 | |
pshufd $0, %xmm15, %xmm10 #328.14 | |
pshufd $0, %xmm1, %xmm14 #335.15 | |
### g = _mm_set1_epi32(hInit[6]); | |
### h = _mm_set1_epi32(hInit[7]); | |
### | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 0, w0); | |
movdqa %xmm5, %xmm1 #346.9 | |
movd 64(%rsp), %xmm15 #337.13 | |
psrld $6, %xmm1 #346.9 | |
pshufd $0, %xmm0, %xmm4 #342.13 | |
movdqa %xmm5, %xmm0 #346.9 | |
pshufd $0, %xmm7, %xmm6 #332.15 | |
pslld $26, %xmm0 #346.9 | |
pshufd $0, %xmm9, %xmm11 #329.14 | |
por %xmm0, %xmm1 #346.9 | |
pshufd $0, %xmm12, %xmm9 #330.15 | |
movdqa %xmm5, %xmm0 #346.9 | |
pshufd $0, %xmm8, %xmm12 #331.15 | |
psrld $11, %xmm0 #346.9 | |
movdqa %xmm6, 896(%rsp) #332.15 | |
pshufd $0, %xmm15, %xmm6 #337.13 | |
movd 4(%rdx), %xmm8 #338.13 | |
movd 8(%rdx), %xmm3 #339.13 | |
movd 24(%rdx), %xmm15 #343.13 | |
pshufd $0, %xmm8, %xmm7 #338.13 | |
pshufd $0, %xmm3, %xmm8 #339.13 | |
pshufd $0, %xmm15, %xmm3 #343.13 | |
movdqa %xmm5, %xmm15 #346.9 | |
pslld $21, %xmm15 #346.9 | |
por %xmm15, %xmm0 #346.9 | |
movdqa %xmm5, %xmm15 #346.9 | |
pxor %xmm0, %xmm1 #346.9 | |
movdqa %xmm5, %xmm0 #346.9 | |
psrld $25, %xmm0 #346.9 | |
pslld $7, %xmm15 #346.9 | |
por %xmm15, %xmm0 #346.9 | |
movdqa %xmm5, %xmm15 #346.9 | |
movd 28(%rdx), %xmm2 #344.13 | |
pand %xmm4, %xmm15 #346.9 | |
movdqa %xmm5, 1024(%rsp) #341.13 | |
pandn %xmm3, %xmm5 #346.9 | |
pshufd $0, %xmm2, %xmm2 #344.13 | |
pxor %xmm0, %xmm1 #346.9 | |
movdqa %xmm3, 1056(%rsp) #343.13 | |
pxor %xmm5, %xmm15 #346.9 | |
movdqa .L_2il0floatpacket.6745(%rip), %xmm3 #346.9 | |
movdqa %xmm6, %xmm0 #346.9 | |
movdqa %xmm2, 1072(%rsp) #344.13 | |
paddd %xmm1, %xmm2 #346.9 | |
movdqa %xmm6, %xmm1 #346.9 | |
paddd %xmm15, %xmm3 #346.9 | |
movdqa %xmm4, 1040(%rsp) #342.13 | |
psrld $2, %xmm0 #346.9 | |
pslld $30, %xmm1 #346.9 | |
movdqa %xmm6, %xmm4 #346.9 | |
movdqa %xmm6, %xmm15 #346.9 | |
paddd %xmm3, %xmm2 #346.9 | |
movdqa %xmm2, 1088(%rsp) #346.9 | |
por %xmm1, %xmm0 #346.9 | |
psrld $13, %xmm4 #346.9 | |
pslld $19, %xmm15 #346.9 | |
movdqa %xmm6, %xmm1 #346.9 | |
movdqa %xmm6, %xmm2 #346.9 | |
por %xmm15, %xmm4 #346.9 | |
psrld $22, %xmm1 #346.9 | |
pslld $10, %xmm2 #346.9 | |
pxor %xmm4, %xmm0 #346.9 | |
por %xmm2, %xmm1 #346.9 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 1, w1); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 2, w2); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 3, w3); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 4, w4); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 5, w5); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 6, w6); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 7, w7); | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 8, w8); | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 9, w9); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 10, w10); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 11, w11); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 12, w12); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 13, w13); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 14, w14); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 15, w15); | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
movdqa %xmm13, %xmm15 #363.14 | |
pxor %xmm1, %xmm0 #346.9 | |
movdqa %xmm13, %xmm1 #363.14 | |
movdqa %xmm13, %xmm2 #363.14 | |
movdqa %xmm13, %xmm4 #363.14 | |
movdqa %xmm6, %xmm3 #346.9 | |
psrld $17, %xmm15 #363.14 | |
pslld $15, %xmm1 #363.14 | |
psrld $19, %xmm2 #363.14 | |
pslld $13, %xmm4 #363.14 | |
pand %xmm7, %xmm3 #346.9 | |
movdqa %xmm6, 960(%rsp) #337.13 | |
pand %xmm8, %xmm6 #346.9 | |
por %xmm1, %xmm15 #363.14 | |
por %xmm4, %xmm2 #363.14 | |
movdqa %xmm13, %xmm5 #363.14 | |
pxor %xmm2, %xmm15 #363.14 | |
movdqa %xmm7, 976(%rsp) #338.13 | |
pand %xmm8, %xmm7 #346.9 | |
movdqa %xmm3, 1104(%rsp) #346.9 | |
pxor %xmm6, %xmm3 #346.9 | |
psrld $10, %xmm5 #363.14 | |
pxor %xmm7, %xmm3 #346.9 | |
pxor %xmm5, %xmm15 #363.14 | |
paddd %xmm3, %xmm0 #346.9 | |
paddd %xmm11, %xmm15 #363.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 16, w0); | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
movdqa %xmm14, %xmm6 #365.14 | |
movdqa %xmm8, 992(%rsp) #339.13 | |
movdqa %xmm14, %xmm8 #365.14 | |
movdqa %xmm0, 1120(%rsp) #346.9 | |
movdqa %xmm14, %xmm0 #365.14 | |
movdqa %xmm15, 1136(%rsp) #363.14 | |
movdqa %xmm14, %xmm15 #365.14 | |
psrld $17, %xmm6 #365.14 | |
pslld $15, %xmm8 #365.14 | |
psrld $19, %xmm15 #365.14 | |
pslld $13, %xmm0 #365.14 | |
por %xmm8, %xmm6 #365.14 | |
por %xmm0, %xmm15 #365.14 | |
movdqa %xmm14, %xmm1 #365.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 17, w1); | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 18, w2); | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 19, w3); | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 20, w4); | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 21, w5); | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 22, w6); | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
movdqa %xmm10, %xmm2 #377.14 | |
movdqa %xmm10, %xmm3 #377.14 | |
pxor %xmm15, %xmm6 #365.14 | |
psrld $10, %xmm1 #365.14 | |
psrld $7, %xmm2 #377.14 | |
pslld $25, %xmm3 #377.14 | |
movdqa %xmm10, %xmm4 #377.14 | |
movdqa %xmm10, %xmm5 #377.14 | |
pxor %xmm1, %xmm6 #365.14 | |
por %xmm3, %xmm2 #377.14 | |
psrld $18, %xmm4 #377.14 | |
pslld $14, %xmm5 #377.14 | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 23, w7); | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
movdqa %xmm11, %xmm15 #379.14 | |
movdqa %xmm11, %xmm0 #379.14 | |
movdqa %xmm11, %xmm1 #379.14 | |
movdqa %xmm11, %xmm3 #379.14 | |
por %xmm5, %xmm4 #377.14 | |
psrld $7, %xmm15 #379.14 | |
pslld $25, %xmm0 #379.14 | |
psrld $18, %xmm1 #379.14 | |
pslld $14, %xmm3 #379.14 | |
pxor %xmm4, %xmm2 #377.14 | |
por %xmm0, %xmm15 #379.14 | |
por %xmm3, %xmm1 #379.14 | |
movdqa %xmm11, %xmm4 #379.14 | |
pxor %xmm1, %xmm15 #379.14 | |
psrld $3, %xmm4 #379.14 | |
pxor %xmm4, %xmm15 #379.14 | |
movdqa %xmm10, %xmm7 #377.14 | |
paddd %xmm10, %xmm15 #379.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 24, w8); | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
movdqa %xmm9, %xmm0 #381.14 | |
movdqa %xmm10, 832(%rsp) #328.14 | |
movdqa %xmm9, %xmm10 #381.14 | |
movdqa %xmm15, 1184(%rsp) #379.14 | |
movdqa %xmm9, %xmm15 #381.14 | |
movdqa %xmm9, %xmm1 #381.14 | |
psrld $3, %xmm7 #377.14 | |
psrld $7, %xmm10 #381.14 | |
pslld $25, %xmm15 #381.14 | |
psrld $18, %xmm0 #381.14 | |
pslld $14, %xmm1 #381.14 | |
pxor %xmm7, %xmm2 #377.14 | |
por %xmm15, %xmm10 #381.14 | |
movdqa %xmm2, 1168(%rsp) #377.14 | |
por %xmm1, %xmm0 #381.14 | |
movdqa %xmm9, %xmm2 #381.14 | |
pxor %xmm0, %xmm10 #381.14 | |
psrld $3, %xmm2 #381.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 25, w9); | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
movdqa %xmm12, %xmm15 #383.15 | |
pxor %xmm2, %xmm10 #381.14 | |
movdqa %xmm12, %xmm0 #383.15 | |
movdqa %xmm11, 848(%rsp) #329.14 | |
paddd %xmm11, %xmm10 #381.14 | |
movdqa %xmm12, %xmm11 #383.15 | |
movdqa %xmm12, %xmm1 #383.15 | |
psrld $7, %xmm11 #383.15 | |
pslld $25, %xmm15 #383.15 | |
psrld $18, %xmm0 #383.15 | |
pslld $14, %xmm1 #383.15 | |
por %xmm15, %xmm11 #383.15 | |
por %xmm1, %xmm0 #383.15 | |
movdqa %xmm12, %xmm2 #383.15 | |
pxor %xmm0, %xmm11 #383.15 | |
psrld $3, %xmm2 #383.15 | |
paddd %xmm9, %xmm6 #365.14 | |
pxor %xmm2, %xmm11 #383.15 | |
movdqa %xmm9, 864(%rsp) #330.15 | |
paddd %xmm9, %xmm11 #383.15 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 26, w10); | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
movdqa 896(%rsp), %xmm9 #385.15 | |
movdqa %xmm6, 1152(%rsp) #365.14 | |
movdqa %xmm9, %xmm3 #385.15 | |
movdqa %xmm9, %xmm4 #385.15 | |
movdqa %xmm9, %xmm5 #385.15 | |
movdqa %xmm9, %xmm6 #385.15 | |
psrld $7, %xmm3 #385.15 | |
pslld $25, %xmm4 #385.15 | |
psrld $18, %xmm5 #385.15 | |
pslld $14, %xmm6 #385.15 | |
por %xmm4, %xmm3 #385.15 | |
por %xmm6, %xmm5 #385.15 | |
movdqa %xmm9, %xmm7 #385.15 | |
pxor %xmm5, %xmm3 #385.15 | |
psrld $3, %xmm7 #385.15 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 27, w11); | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
movdqa 912(%rsp), %xmm15 #387.15 | |
pxor %xmm7, %xmm3 #385.15 | |
movdqa %xmm12, 880(%rsp) #331.15 | |
paddd %xmm12, %xmm3 #385.15 | |
movdqa %xmm15, %xmm12 #387.15 | |
movdqa %xmm15, %xmm0 #387.15 | |
movdqa %xmm15, %xmm1 #387.15 | |
movdqa %xmm15, %xmm2 #387.15 | |
psrld $7, %xmm12 #387.15 | |
pslld $25, %xmm0 #387.15 | |
psrld $18, %xmm1 #387.15 | |
pslld $14, %xmm2 #387.15 | |
movdqa %xmm3, 1232(%rsp) #385.15 | |
por %xmm0, %xmm12 #387.15 | |
por %xmm2, %xmm1 #387.15 | |
movdqa %xmm15, %xmm3 #387.15 | |
pxor %xmm1, %xmm12 #387.15 | |
psrld $3, %xmm3 #387.15 | |
pxor %xmm3, %xmm12 #387.15 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 28, w12); | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
movdqa %xmm13, %xmm0 #389.15 | |
paddd %xmm9, %xmm12 #387.15 | |
movdqa %xmm13, %xmm9 #389.15 | |
movdqa %xmm13, %xmm1 #389.15 | |
movdqa %xmm13, %xmm2 #389.15 | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 29, w13); | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
movdqa %xmm14, %xmm4 #391.15 | |
movdqa %xmm14, %xmm5 #391.15 | |
movdqa %xmm14, %xmm6 #391.15 | |
movdqa %xmm14, %xmm7 #391.15 | |
psrld $7, %xmm9 #389.15 | |
pslld $25, %xmm0 #389.15 | |
psrld $18, %xmm1 #389.15 | |
pslld $14, %xmm2 #389.15 | |
psrld $7, %xmm4 #391.15 | |
pslld $25, %xmm5 #391.15 | |
psrld $18, %xmm6 #391.15 | |
pslld $14, %xmm7 #391.15 | |
por %xmm0, %xmm9 #389.15 | |
por %xmm2, %xmm1 #389.15 | |
movdqa %xmm13, %xmm3 #389.15 | |
por %xmm5, %xmm4 #391.15 | |
por %xmm7, %xmm6 #391.15 | |
pxor %xmm1, %xmm9 #389.15 | |
movdqa %xmm14, 944(%rsp) #335.15 | |
psrld $3, %xmm3 #389.15 | |
pxor %xmm6, %xmm4 #391.15 | |
psrld $3, %xmm14 #391.15 | |
pxor %xmm3, %xmm9 #389.15 | |
pxor %xmm14, %xmm4 #391.15 | |
paddd %xmm15, %xmm9 #389.15 | |
paddd %xmm13, %xmm4 #391.15 | |
movdqa %xmm13, 928(%rsp) #334.15 | |
movdqa %xmm10, 1200(%rsp) #381.14 | |
movdqa %xmm11, 1216(%rsp) #383.15 | |
movdqa %xmm12, 1248(%rsp) #387.15 | |
movdqa %xmm9, 1264(%rsp) #389.15 | |
movdqa %xmm4, 1280(%rsp) #391.15 | |
# LOE | |
..B2.2: # Preds ..B2.7 ..B2.6 | |
movd 5248(%rsp), %xmm14 #187.34 | |
movdqa 80(%rsp), %xmm8 #187.10 | |
pshufd $0, %xmm14, %xmm11 #187.34 | |
movdqa 528(%rsp), %xmm9 #202.9 | |
paddd %xmm11, %xmm8 #187.10 | |
movdqa 304(%rsp), %xmm0 #202.9 | |
paddd %xmm8, %xmm9 #202.9 | |
paddd %xmm9, %xmm0 #202.9 | |
movdqa %xmm0, %xmm10 #203.9 | |
movdqa %xmm0, %xmm7 #203.9 | |
movdqa %xmm0, %xmm13 #203.9 | |
movdqa %xmm0, %xmm5 #203.9 | |
movdqa 496(%rsp), %xmm12 #203.9 | |
psrld $6, %xmm10 #203.9 | |
pslld $26, %xmm7 #203.9 | |
psrld $11, %xmm13 #203.9 | |
pslld $21, %xmm5 #203.9 | |
movdqa %xmm0, %xmm3 #203.9 | |
movdqa %xmm0, %xmm2 #203.9 | |
por %xmm7, %xmm10 #203.9 | |
movdqa 464(%rsp), %xmm15 #203.9 | |
por %xmm5, %xmm13 #203.9 | |
psrld $25, %xmm3 #203.9 | |
pslld $7, %xmm2 #203.9 | |
movdqa %xmm12, %xmm4 #203.9 | |
movdqa %xmm0, %xmm14 #203.9 | |
pxor %xmm13, %xmm10 #203.9 | |
por %xmm2, %xmm3 #203.9 | |
pand %xmm0, %xmm4 #203.9 | |
pandn %xmm15, %xmm14 #203.9 | |
movdqa %xmm8, (%rsp) #187.10 | |
pxor %xmm3, %xmm10 #203.9 | |
movdqa 560(%rsp), %xmm1 #202.9 | |
pxor %xmm14, %xmm4 #203.9 | |
movdqa 432(%rsp), %xmm11 #203.9 | |
paddd %xmm9, %xmm1 #202.9 | |
movdqa .L_2il0floatpacket.6749(%rip), %xmm8 #203.9 | |
paddd %xmm10, %xmm11 #203.9 | |
paddd %xmm4, %xmm8 #203.9 | |
movdqa %xmm1, %xmm9 #203.9 | |
movdqa 112(%rsp), %xmm3 #203.9 | |
paddd %xmm8, %xmm11 #203.9 | |
movdqa %xmm1, %xmm10 #203.9 | |
movdqa %xmm1, %xmm13 #203.9 | |
movdqa %xmm1, %xmm5 #203.9 | |
paddd %xmm11, %xmm3 #203.9 | |
movdqa 512(%rsp), %xmm6 #203.9 | |
psrld $2, %xmm9 #203.9 | |
pslld $30, %xmm10 #203.9 | |
psrld $13, %xmm13 #203.9 | |
pslld $19, %xmm5 #203.9 | |
movdqa %xmm1, %xmm14 #203.9 | |
movdqa %xmm1, %xmm11 #203.9 | |
movdqa %xmm6, %xmm7 #203.9 | |
movdqa 480(%rsp), %xmm4 #203.9 | |
por %xmm10, %xmm9 #203.9 | |
por %xmm5, %xmm13 #203.9 | |
psrld $22, %xmm14 #203.9 | |
pslld $10, %xmm11 #203.9 | |
pand %xmm1, %xmm7 #203.9 | |
pxor %xmm13, %xmm9 #203.9 | |
por %xmm11, %xmm14 #203.9 | |
movdqa %xmm4, %xmm8 #203.9 | |
pxor %xmm14, %xmm9 #203.9 | |
movdqa 448(%rsp), %xmm2 #203.9 | |
pand %xmm1, %xmm8 #203.9 | |
movdqa %xmm7, %xmm14 #203.9 | |
paddd %xmm3, %xmm2 #203.9 | |
movdqa 544(%rsp), %xmm11 #203.9 | |
pxor %xmm8, %xmm14 #203.9 | |
pxor %xmm14, %xmm11 #203.9 | |
movdqa %xmm2, %xmm10 #204.9 | |
movdqa %xmm2, %xmm13 #204.9 | |
movdqa %xmm2, %xmm5 #204.9 | |
movdqa %xmm2, %xmm14 #204.9 | |
paddd %xmm11, %xmm9 #203.9 | |
psrld $6, %xmm10 #204.9 | |
pslld $26, %xmm13 #204.9 | |
psrld $11, %xmm5 #204.9 | |
pslld $21, %xmm14 #204.9 | |
movdqa %xmm2, %xmm11 #204.9 | |
movdqa %xmm2, %xmm8 #204.9 | |
por %xmm13, %xmm10 #204.9 | |
por %xmm14, %xmm5 #204.9 | |
psrld $25, %xmm11 #204.9 | |
pslld $7, %xmm8 #204.9 | |
pxor %xmm5, %xmm10 #204.9 | |
por %xmm8, %xmm11 #204.9 | |
movdqa %xmm2, %xmm5 #204.9 | |
movdqa %xmm2, %xmm8 #204.9 | |
paddd %xmm9, %xmm3 #203.9 | |
pxor %xmm11, %xmm10 #204.9 | |
pand %xmm0, %xmm5 #204.9 | |
pandn %xmm12, %xmm8 #204.9 | |
movdqa .L_2il0floatpacket.6750(%rip), %xmm14 #204.9 | |
paddd %xmm10, %xmm15 #204.9 | |
pxor %xmm8, %xmm5 #204.9 | |
movdqa %xmm3, %xmm11 #204.9 | |
movdqa %xmm3, %xmm8 #204.9 | |
movdqa %xmm3, %xmm9 #204.9 | |
movdqa %xmm3, %xmm10 #204.9 | |
paddd %xmm5, %xmm14 #204.9 | |
psrld $2, %xmm11 #204.9 | |
pslld $30, %xmm8 #204.9 | |
psrld $13, %xmm9 #204.9 | |
pslld $19, %xmm10 #204.9 | |
paddd %xmm14, %xmm15 #204.9 | |
movdqa %xmm3, %xmm14 #204.9 | |
por %xmm8, %xmm11 #204.9 | |
por %xmm10, %xmm9 #204.9 | |
pand %xmm1, %xmm14 #204.9 | |
pxor %xmm9, %xmm11 #204.9 | |
movdqa %xmm3, %xmm13 #204.9 | |
movdqa %xmm3, %xmm8 #204.9 | |
movdqa %xmm6, %xmm9 #204.9 | |
psrld $22, %xmm13 #204.9 | |
pslld $10, %xmm8 #204.9 | |
pand %xmm3, %xmm9 #204.9 | |
movdqa %xmm14, %xmm10 #204.9 | |
por %xmm8, %xmm13 #204.9 | |
movdqa 128(%rsp), %xmm5 #204.9 | |
pxor %xmm9, %xmm10 #204.9 | |
paddd %xmm15, %xmm5 #204.9 | |
pxor %xmm13, %xmm11 #204.9 | |
pxor %xmm7, %xmm10 #204.9 | |
paddd %xmm5, %xmm4 #204.9 | |
paddd %xmm10, %xmm11 #204.9 | |
movdqa %xmm4, %xmm7 #205.9 | |
paddd %xmm11, %xmm5 #204.9 | |
movdqa %xmm4, %xmm11 #205.9 | |
movdqa %xmm4, %xmm8 #205.9 | |
movdqa %xmm4, %xmm9 #205.9 | |
psrld $6, %xmm7 #205.9 | |
pslld $26, %xmm11 #205.9 | |
psrld $11, %xmm8 #205.9 | |
pslld $21, %xmm9 #205.9 | |
movdqa %xmm4, %xmm10 #205.9 | |
movdqa %xmm4, %xmm15 #205.9 | |
por %xmm11, %xmm7 #205.9 | |
por %xmm9, %xmm8 #205.9 | |
psrld $25, %xmm10 #205.9 | |
pslld $7, %xmm15 #205.9 | |
pxor %xmm8, %xmm7 #205.9 | |
por %xmm15, %xmm10 #205.9 | |
pxor %xmm10, %xmm7 #205.9 | |
movdqa %xmm4, %xmm8 #205.9 | |
paddd %xmm7, %xmm12 #205.9 | |
movdqa %xmm4, %xmm7 #205.9 | |
pand %xmm2, %xmm7 #205.9 | |
pandn %xmm0, %xmm8 #205.9 | |
movdqa .L_2il0floatpacket.6751(%rip), %xmm11 #205.9 | |
pxor %xmm8, %xmm7 #205.9 | |
paddd %xmm7, %xmm11 #205.9 | |
movdqa %xmm5, %xmm9 #205.9 | |
paddd %xmm11, %xmm12 #205.9 | |
movdqa %xmm5, %xmm13 #205.9 | |
movdqa %xmm5, %xmm11 #205.9 | |
movdqa %xmm5, %xmm8 #205.9 | |
movdqa 144(%rsp), %xmm7 #205.9 | |
psrld $2, %xmm9 #205.9 | |
pslld $30, %xmm13 #205.9 | |
psrld $13, %xmm11 #205.9 | |
pslld $19, %xmm8 #205.9 | |
paddd %xmm12, %xmm7 #205.9 | |
movdqa %xmm5, %xmm12 #205.9 | |
por %xmm13, %xmm9 #205.9 | |
por %xmm8, %xmm11 #205.9 | |
pand %xmm3, %xmm12 #205.9 | |
pxor %xmm11, %xmm9 #205.9 | |
movdqa %xmm5, %xmm10 #205.9 | |
movdqa %xmm5, %xmm15 #205.9 | |
movdqa %xmm5, %xmm11 #205.9 | |
psrld $22, %xmm10 #205.9 | |
pslld $10, %xmm15 #205.9 | |
pand %xmm1, %xmm11 #205.9 | |
movdqa %xmm12, %xmm8 #205.9 | |
por %xmm15, %xmm10 #205.9 | |
pxor %xmm11, %xmm8 #205.9 | |
pxor %xmm10, %xmm9 #205.9 | |
pxor %xmm14, %xmm8 #205.9 | |
paddd %xmm7, %xmm6 #205.9 | |
paddd %xmm8, %xmm9 #205.9 | |
paddd %xmm9, %xmm7 #205.9 | |
movdqa %xmm6, %xmm14 #206.9 | |
movdqa %xmm6, %xmm11 #206.9 | |
movdqa %xmm6, %xmm8 #206.9 | |
movdqa %xmm6, %xmm9 #206.9 | |
psrld $6, %xmm14 #206.9 | |
pslld $26, %xmm11 #206.9 | |
psrld $11, %xmm8 #206.9 | |
pslld $21, %xmm9 #206.9 | |
movdqa %xmm6, %xmm10 #206.9 | |
movdqa %xmm6, %xmm15 #206.9 | |
por %xmm11, %xmm14 #206.9 | |
por %xmm9, %xmm8 #206.9 | |
psrld $25, %xmm10 #206.9 | |
pslld $7, %xmm15 #206.9 | |
pxor %xmm8, %xmm14 #206.9 | |
por %xmm15, %xmm10 #206.9 | |
movdqa %xmm6, %xmm8 #206.9 | |
movdqa %xmm6, %xmm9 #206.9 | |
pxor %xmm10, %xmm14 #206.9 | |
pand %xmm4, %xmm8 #206.9 | |
pandn %xmm2, %xmm9 #206.9 | |
paddd %xmm14, %xmm0 #206.9 | |
pxor %xmm9, %xmm8 #206.9 | |
movdqa .L_2il0floatpacket.6752(%rip), %xmm14 #206.9 | |
movdqa %xmm7, %xmm13 #206.9 | |
paddd %xmm8, %xmm14 #206.9 | |
movdqa %xmm7, %xmm11 #206.9 | |
movdqa 160(%rsp), %xmm8 #206.9 | |
paddd %xmm14, %xmm0 #206.9 | |
movdqa %xmm7, %xmm14 #206.9 | |
movdqa %xmm7, %xmm9 #206.9 | |
paddd %xmm0, %xmm8 #206.9 | |
movdqa %xmm7, %xmm0 #206.9 | |
psrld $2, %xmm13 #206.9 | |
pslld $30, %xmm14 #206.9 | |
psrld $13, %xmm11 #206.9 | |
pslld $19, %xmm9 #206.9 | |
pand %xmm5, %xmm0 #206.9 | |
por %xmm14, %xmm13 #206.9 | |
por %xmm9, %xmm11 #206.9 | |
movdqa %xmm7, %xmm10 #206.9 | |
movdqa %xmm7, %xmm15 #206.9 | |
movdqa %xmm7, %xmm14 #206.9 | |
pxor %xmm11, %xmm13 #206.9 | |
psrld $22, %xmm10 #206.9 | |
pslld $10, %xmm15 #206.9 | |
pand %xmm3, %xmm14 #206.9 | |
movdqa %xmm0, %xmm11 #206.9 | |
por %xmm15, %xmm10 #206.9 | |
pxor %xmm14, %xmm11 #206.9 | |
paddd %xmm8, %xmm1 #206.9 | |
pxor %xmm10, %xmm13 #206.9 | |
pxor %xmm12, %xmm11 #206.9 | |
paddd %xmm11, %xmm13 #206.9 | |
movdqa %xmm1, %xmm12 #207.9 | |
movdqa %xmm1, %xmm14 #207.9 | |
movdqa %xmm1, %xmm11 #207.9 | |
movdqa %xmm1, %xmm9 #207.9 | |
psrld $6, %xmm12 #207.9 | |
pslld $26, %xmm14 #207.9 | |
psrld $11, %xmm11 #207.9 | |
pslld $21, %xmm9 #207.9 | |
movdqa %xmm1, %xmm10 #207.9 | |
movdqa %xmm1, %xmm15 #207.9 | |
por %xmm14, %xmm12 #207.9 | |
por %xmm9, %xmm11 #207.9 | |
psrld $25, %xmm10 #207.9 | |
pslld $7, %xmm15 #207.9 | |
pxor %xmm11, %xmm12 #207.9 | |
por %xmm15, %xmm10 #207.9 | |
movdqa %xmm1, %xmm9 #207.9 | |
pxor %xmm10, %xmm12 #207.9 | |
movdqa %xmm1, %xmm10 #207.9 | |
pand %xmm6, %xmm9 #207.9 | |
pandn %xmm4, %xmm10 #207.9 | |
paddd %xmm12, %xmm2 #207.9 | |
pxor %xmm10, %xmm9 #207.9 | |
movdqa .L_2il0floatpacket.6753(%rip), %xmm12 #207.9 | |
paddd %xmm13, %xmm8 #206.9 | |
paddd %xmm9, %xmm12 #207.9 | |
movdqa %xmm8, %xmm13 #207.9 | |
movdqa 176(%rsp), %xmm9 #207.9 | |
paddd %xmm12, %xmm2 #207.9 | |
movdqa %xmm8, %xmm12 #207.9 | |
movdqa %xmm8, %xmm14 #207.9 | |
movdqa %xmm8, %xmm11 #207.9 | |
paddd %xmm2, %xmm9 #207.9 | |
movdqa %xmm8, %xmm2 #207.9 | |
psrld $2, %xmm13 #207.9 | |
pslld $30, %xmm12 #207.9 | |
psrld $13, %xmm14 #207.9 | |
pslld $19, %xmm11 #207.9 | |
pand %xmm7, %xmm2 #207.9 | |
por %xmm12, %xmm13 #207.9 | |
por %xmm11, %xmm14 #207.9 | |
movdqa %xmm8, %xmm10 #207.9 | |
movdqa %xmm8, %xmm15 #207.9 | |
movdqa %xmm8, %xmm12 #207.9 | |
pxor %xmm14, %xmm13 #207.9 | |
psrld $22, %xmm10 #207.9 | |
pslld $10, %xmm15 #207.9 | |
pand %xmm5, %xmm12 #207.9 | |
movdqa %xmm2, %xmm14 #207.9 | |
por %xmm15, %xmm10 #207.9 | |
pxor %xmm12, %xmm14 #207.9 | |
paddd %xmm9, %xmm3 #207.9 | |
pxor %xmm10, %xmm13 #207.9 | |
pxor %xmm0, %xmm14 #207.9 | |
movdqa %xmm3, %xmm0 #208.9 | |
paddd %xmm14, %xmm13 #207.9 | |
movdqa %xmm3, %xmm12 #208.9 | |
movdqa %xmm3, %xmm14 #208.9 | |
movdqa %xmm3, %xmm11 #208.9 | |
psrld $6, %xmm0 #208.9 | |
pslld $26, %xmm12 #208.9 | |
psrld $11, %xmm14 #208.9 | |
pslld $21, %xmm11 #208.9 | |
movdqa %xmm3, %xmm10 #208.9 | |
movdqa %xmm3, %xmm15 #208.9 | |
por %xmm12, %xmm0 #208.9 | |
por %xmm11, %xmm14 #208.9 | |
psrld $25, %xmm10 #208.9 | |
pslld $7, %xmm15 #208.9 | |
pxor %xmm14, %xmm0 #208.9 | |
por %xmm15, %xmm10 #208.9 | |
pxor %xmm10, %xmm0 #208.9 | |
movdqa %xmm3, %xmm10 #208.9 | |
paddd %xmm0, %xmm4 #208.9 | |
movdqa %xmm3, %xmm0 #208.9 | |
paddd %xmm13, %xmm9 #207.9 | |
pand %xmm1, %xmm0 #208.9 | |
pandn %xmm6, %xmm10 #208.9 | |
movdqa %xmm9, %xmm14 #208.9 | |
movdqa .L_2il0floatpacket.6754(%rip), %xmm12 #208.9 | |
pxor %xmm10, %xmm0 #208.9 | |
movdqa %xmm9, %xmm11 #208.9 | |
paddd %xmm0, %xmm12 #208.9 | |
psrld $2, %xmm14 #208.9 | |
pslld $30, %xmm11 #208.9 | |
movdqa %xmm9, %xmm15 #208.9 | |
movdqa %xmm9, %xmm0 #208.9 | |
paddd %xmm12, %xmm4 #208.9 | |
por %xmm11, %xmm14 #208.9 | |
psrld $13, %xmm15 #208.9 | |
pslld $19, %xmm0 #208.9 | |
movdqa %xmm9, %xmm12 #208.9 | |
movdqa %xmm9, %xmm11 #208.9 | |
movdqa 192(%rsp), %xmm10 #208.9 | |
movdqa %xmm9, %xmm13 #208.9 | |
por %xmm0, %xmm15 #208.9 | |
psrld $22, %xmm12 #208.9 | |
pslld $10, %xmm11 #208.9 | |
paddd %xmm4, %xmm10 #208.9 | |
pand %xmm8, %xmm13 #208.9 | |
pxor %xmm15, %xmm14 #208.9 | |
por %xmm11, %xmm12 #208.9 | |
movdqa %xmm9, %xmm4 #208.9 | |
pxor %xmm12, %xmm14 #208.9 | |
pand %xmm7, %xmm4 #208.9 | |
movdqa %xmm13, %xmm12 #208.9 | |
paddd %xmm10, %xmm5 #208.9 | |
pxor %xmm4, %xmm12 #208.9 | |
movdqa %xmm5, %xmm11 #209.9 | |
pxor %xmm2, %xmm12 #208.9 | |
movdqa %xmm5, %xmm2 #209.9 | |
paddd %xmm12, %xmm14 #208.9 | |
movdqa %xmm5, %xmm12 #209.9 | |
paddd %xmm14, %xmm10 #208.9 | |
movdqa %xmm5, %xmm14 #209.9 | |
psrld $6, %xmm2 #209.9 | |
pslld $26, %xmm12 #209.9 | |
psrld $11, %xmm14 #209.9 | |
pslld $21, %xmm11 #209.9 | |
movdqa %xmm5, %xmm15 #209.9 | |
movdqa %xmm5, %xmm0 #209.9 | |
por %xmm12, %xmm2 #209.9 | |
por %xmm11, %xmm14 #209.9 | |
psrld $25, %xmm15 #209.9 | |
pslld $7, %xmm0 #209.9 | |
pxor %xmm14, %xmm2 #209.9 | |
por %xmm0, %xmm15 #209.9 | |
pxor %xmm15, %xmm2 #209.9 | |
movdqa %xmm5, %xmm0 #209.9 | |
paddd %xmm2, %xmm6 #209.9 | |
movdqa %xmm5, %xmm2 #209.9 | |
pand %xmm3, %xmm0 #209.9 | |
pandn %xmm1, %xmm2 #209.9 | |
movdqa .L_2il0floatpacket.6755(%rip), %xmm12 #209.9 | |
pxor %xmm2, %xmm0 #209.9 | |
paddd %xmm0, %xmm12 #209.9 | |
movdqa %xmm10, %xmm15 #209.9 | |
movdqa %xmm10, %xmm0 #209.9 | |
psrld $2, %xmm15 #209.9 | |
pslld $30, %xmm0 #209.9 | |
movdqa %xmm10, %xmm2 #209.9 | |
movdqa %xmm10, %xmm4 #209.9 | |
paddd %xmm12, %xmm6 #209.9 | |
por %xmm0, %xmm15 #209.9 | |
psrld $13, %xmm2 #209.9 | |
pslld $19, %xmm4 #209.9 | |
movdqa %xmm10, %xmm12 #209.9 | |
movdqa %xmm10, %xmm0 #209.9 | |
movdqa %xmm10, %xmm14 #209.9 | |
movdqa 208(%rsp), %xmm11 #209.9 | |
por %xmm4, %xmm2 #209.9 | |
psrld $22, %xmm12 #209.9 | |
pslld $10, %xmm0 #209.9 | |
paddd %xmm6, %xmm11 #209.9 | |
pand %xmm9, %xmm14 #209.9 | |
pxor %xmm2, %xmm15 #209.9 | |
por %xmm0, %xmm12 #209.9 | |
movdqa %xmm10, %xmm6 #209.9 | |
pxor %xmm12, %xmm15 #209.9 | |
pand %xmm8, %xmm6 #209.9 | |
movdqa %xmm14, %xmm12 #209.9 | |
pxor %xmm6, %xmm12 #209.9 | |
paddd %xmm11, %xmm7 #209.9 | |
pxor %xmm13, %xmm12 #209.9 | |
movdqa %xmm7, %xmm13 #210.9 | |
paddd %xmm12, %xmm15 #209.9 | |
movdqa %xmm7, %xmm12 #210.9 | |
paddd %xmm15, %xmm11 #209.9 | |
movdqa %xmm7, %xmm15 #210.9 | |
movdqa %xmm7, %xmm0 #210.9 | |
psrld $6, %xmm13 #210.9 | |
pslld $26, %xmm12 #210.9 | |
psrld $11, %xmm15 #210.9 | |
pslld $21, %xmm0 #210.9 | |
movdqa %xmm7, %xmm2 #210.9 | |
movdqa %xmm7, %xmm4 #210.9 | |
por %xmm12, %xmm13 #210.9 | |
por %xmm0, %xmm15 #210.9 | |
psrld $25, %xmm2 #210.9 | |
pslld $7, %xmm4 #210.9 | |
pxor %xmm15, %xmm13 #210.9 | |
por %xmm4, %xmm2 #210.9 | |
movdqa %xmm7, %xmm0 #210.9 | |
pxor %xmm2, %xmm13 #210.9 | |
movdqa %xmm7, %xmm2 #210.9 | |
pand %xmm5, %xmm0 #210.9 | |
pandn %xmm3, %xmm2 #210.9 | |
movdqa .L_2il0floatpacket.6756(%rip), %xmm12 #210.9 | |
paddd %xmm13, %xmm1 #210.9 | |
pxor %xmm2, %xmm0 #210.9 | |
movdqa %xmm11, %xmm6 #210.9 | |
paddd %xmm0, %xmm12 #210.9 | |
movdqa %xmm1, %xmm0 #210.9 | |
movdqa 224(%rsp), %xmm1 #210.9 | |
paddd %xmm12, %xmm0 #210.9 | |
paddd %xmm0, %xmm1 #210.9 | |
movdqa %xmm11, %xmm15 #210.9 | |
movdqa %xmm11, %xmm13 #210.9 | |
movdqa %xmm11, %xmm0 #210.9 | |
movdqa %xmm11, %xmm12 #210.9 | |
psrld $2, %xmm6 #210.9 | |
pslld $30, %xmm15 #210.9 | |
psrld $13, %xmm13 #210.9 | |
pslld $19, %xmm0 #210.9 | |
pand %xmm10, %xmm12 #210.9 | |
por %xmm15, %xmm6 #210.9 | |
por %xmm0, %xmm13 #210.9 | |
movdqa %xmm11, %xmm2 #210.9 | |
movdqa %xmm11, %xmm4 #210.9 | |
movdqa %xmm11, %xmm15 #210.9 | |
pxor %xmm13, %xmm6 #210.9 | |
psrld $22, %xmm2 #210.9 | |
pslld $10, %xmm4 #210.9 | |
pand %xmm9, %xmm15 #210.9 | |
movdqa %xmm12, %xmm13 #210.9 | |
por %xmm4, %xmm2 #210.9 | |
pxor %xmm15, %xmm13 #210.9 | |
paddd %xmm1, %xmm8 #210.9 | |
pxor %xmm2, %xmm6 #210.9 | |
pxor %xmm14, %xmm13 #210.9 | |
movdqa %xmm8, %xmm14 #211.9 | |
paddd %xmm13, %xmm6 #210.9 | |
movdqa %xmm8, %xmm15 #211.9 | |
movdqa %xmm8, %xmm13 #211.9 | |
movdqa %xmm8, %xmm0 #211.9 | |
psrld $6, %xmm14 #211.9 | |
pslld $26, %xmm15 #211.9 | |
psrld $11, %xmm13 #211.9 | |
pslld $21, %xmm0 #211.9 | |
movdqa %xmm8, %xmm2 #211.9 | |
movdqa %xmm8, %xmm4 #211.9 | |
por %xmm15, %xmm14 #211.9 | |
por %xmm0, %xmm13 #211.9 | |
psrld $25, %xmm2 #211.9 | |
pslld $7, %xmm4 #211.9 | |
pxor %xmm13, %xmm14 #211.9 | |
por %xmm4, %xmm2 #211.9 | |
pxor %xmm2, %xmm14 #211.9 | |
movdqa %xmm8, %xmm0 #211.9 | |
movdqa %xmm8, %xmm2 #211.9 | |
pand %xmm7, %xmm0 #211.9 | |
pandn %xmm5, %xmm2 #211.9 | |
paddd %xmm6, %xmm1 #210.9 | |
paddd %xmm14, %xmm3 #211.9 | |
pxor %xmm2, %xmm0 #211.9 | |
movdqa .L_2il0floatpacket.6757(%rip), %xmm14 #211.9 | |
movdqa %xmm1, %xmm13 #211.9 | |
paddd %xmm0, %xmm14 #211.9 | |
movdqa %xmm1, %xmm0 #211.9 | |
movdqa %xmm1, %xmm2 #211.9 | |
movdqa %xmm1, %xmm4 #211.9 | |
psrld $2, %xmm13 #211.9 | |
pslld $30, %xmm0 #211.9 | |
psrld $13, %xmm2 #211.9 | |
pslld $19, %xmm4 #211.9 | |
movdqa %xmm1, %xmm15 #211.9 | |
por %xmm0, %xmm13 #211.9 | |
por %xmm4, %xmm2 #211.9 | |
paddd %xmm14, %xmm3 #211.9 | |
movdqa 240(%rsp), %xmm14 #211.9 | |
pand %xmm11, %xmm15 #211.9 | |
pxor %xmm2, %xmm13 #211.9 | |
movdqa %xmm1, %xmm6 #211.9 | |
movdqa %xmm1, %xmm2 #211.9 | |
movdqa %xmm1, %xmm0 #211.9 | |
paddd %xmm3, %xmm14 #211.9 | |
psrld $22, %xmm6 #211.9 | |
pslld $10, %xmm2 #211.9 | |
pand %xmm10, %xmm0 #211.9 | |
movdqa %xmm15, %xmm3 #211.9 | |
por %xmm2, %xmm6 #211.9 | |
pxor %xmm0, %xmm3 #211.9 | |
pxor %xmm6, %xmm13 #211.9 | |
pxor %xmm12, %xmm3 #211.9 | |
paddd %xmm14, %xmm9 #211.9 | |
paddd %xmm3, %xmm13 #211.9 | |
movdqa %xmm9, %xmm12 #212.9 | |
paddd %xmm13, %xmm14 #211.9 | |
movdqa %xmm9, %xmm13 #212.9 | |
movdqa %xmm9, %xmm0 #212.9 | |
movdqa %xmm9, %xmm2 #212.9 | |
psrld $6, %xmm12 #212.9 | |
pslld $26, %xmm13 #212.9 | |
psrld $11, %xmm0 #212.9 | |
pslld $21, %xmm2 #212.9 | |
movdqa %xmm9, %xmm3 #212.9 | |
movdqa %xmm9, %xmm4 #212.9 | |
por %xmm13, %xmm12 #212.9 | |
por %xmm2, %xmm0 #212.9 | |
psrld $25, %xmm3 #212.9 | |
pslld $7, %xmm4 #212.9 | |
pxor %xmm0, %xmm12 #212.9 | |
por %xmm4, %xmm3 #212.9 | |
movdqa %xmm9, %xmm0 #212.9 | |
movdqa %xmm9, %xmm2 #212.9 | |
pxor %xmm3, %xmm12 #212.9 | |
pand %xmm8, %xmm0 #212.9 | |
pandn %xmm7, %xmm2 #212.9 | |
paddd %xmm12, %xmm5 #212.9 | |
movdqa .L_2il0floatpacket.6758(%rip), %xmm12 #212.9 | |
pxor %xmm2, %xmm0 #212.9 | |
movdqa %xmm14, %xmm13 #212.9 | |
paddd %xmm0, %xmm12 #212.9 | |
movdqa %xmm1, 64(%rsp) #210.9 | |
pand %xmm1, %xmm13 #212.9 | |
movdqa %xmm14, %xmm0 #212.9 | |
movdqa %xmm14, %xmm1 #212.9 | |
psrld $2, %xmm0 #212.9 | |
pslld $30, %xmm1 #212.9 | |
movdqa %xmm14, %xmm2 #212.9 | |
movdqa %xmm14, %xmm3 #212.9 | |
por %xmm1, %xmm0 #212.9 | |
psrld $13, %xmm2 #212.9 | |
pslld $19, %xmm3 #212.9 | |
movdqa %xmm14, %xmm4 #212.9 | |
movdqa %xmm14, %xmm6 #212.9 | |
movdqa %xmm14, %xmm1 #212.9 | |
por %xmm3, %xmm2 #212.9 | |
psrld $22, %xmm4 #212.9 | |
pslld $10, %xmm6 #212.9 | |
pand %xmm11, %xmm1 #212.9 | |
movdqa %xmm13, 1344(%rsp) #212.9 | |
paddd %xmm12, %xmm5 #212.9 | |
movdqa 256(%rsp), %xmm12 #212.9 | |
pxor %xmm2, %xmm0 #212.9 | |
por %xmm6, %xmm4 #212.9 | |
pxor %xmm1, %xmm13 #212.9 | |
paddd %xmm5, %xmm12 #212.9 | |
pxor %xmm4, %xmm0 #212.9 | |
pxor %xmm15, %xmm13 #212.9 | |
paddd %xmm12, %xmm10 #212.9 | |
paddd %xmm13, %xmm0 #212.9 | |
movdqa %xmm10, %xmm15 #213.9 | |
paddd %xmm0, %xmm12 #212.9 | |
movdqa %xmm10, %xmm0 #213.9 | |
movdqa %xmm10, %xmm1 #213.9 | |
movdqa %xmm10, %xmm2 #213.9 | |
psrld $6, %xmm15 #213.9 | |
pslld $26, %xmm0 #213.9 | |
psrld $11, %xmm1 #213.9 | |
pslld $21, %xmm2 #213.9 | |
movdqa %xmm10, %xmm3 #213.9 | |
movdqa %xmm10, %xmm4 #213.9 | |
por %xmm0, %xmm15 #213.9 | |
por %xmm2, %xmm1 #213.9 | |
psrld $25, %xmm3 #213.9 | |
pslld $7, %xmm4 #213.9 | |
movdqa %xmm10, %xmm0 #213.9 | |
pxor %xmm1, %xmm15 #213.9 | |
movdqa %xmm10, 1328(%rsp) #212.9 | |
por %xmm4, %xmm3 #213.9 | |
pand %xmm9, %xmm0 #213.9 | |
pandn %xmm8, %xmm10 #213.9 | |
movdqa %xmm8, 48(%rsp) #210.9 | |
pxor %xmm3, %xmm15 #213.9 | |
movdqa .L_2il0floatpacket.6759(%rip), %xmm8 #213.9 | |
pxor %xmm10, %xmm0 #213.9 | |
movdqa %xmm12, %xmm1 #213.9 | |
movdqa %xmm12, %xmm2 #213.9 | |
movdqa %xmm12, %xmm3 #213.9 | |
movdqa %xmm12, %xmm4 #213.9 | |
paddd %xmm15, %xmm7 #213.9 | |
paddd %xmm0, %xmm8 #213.9 | |
psrld $2, %xmm1 #213.9 | |
pslld $30, %xmm2 #213.9 | |
psrld $13, %xmm3 #213.9 | |
pslld $19, %xmm4 #213.9 | |
movdqa %xmm12, %xmm5 #213.9 | |
paddd %xmm8, %xmm7 #213.9 | |
movdqa %xmm9, 1296(%rsp) #211.9 | |
movdqa %xmm12, %xmm0 #213.9 | |
movdqa %xmm12, 1360(%rsp) #212.9 | |
por %xmm2, %xmm1 #213.9 | |
movdqa 272(%rsp), %xmm9 #213.9 | |
por %xmm4, %xmm3 #213.9 | |
psrld $22, %xmm5 #213.9 | |
pslld $10, %xmm12 #213.9 | |
paddd %xmm7, %xmm9 #213.9 | |
pxor %xmm3, %xmm1 #213.9 | |
por %xmm12, %xmm5 #213.9 | |
paddd %xmm9, %xmm11 #213.9 | |
pand %xmm14, %xmm0 #213.9 | |
pxor %xmm5, %xmm1 #213.9 | |
movdqa %xmm14, 1312(%rsp) #211.9 | |
movdqa %xmm9, 1376(%rsp) #213.9 | |
movdqa %xmm11, 1392(%rsp) #213.9 | |
movdqa %xmm0, 1408(%rsp) #213.9 | |
movdqa %xmm1, 1424(%rsp) #213.9 | |
# LOE | |
..B2.21: # Preds ..B2.2 | |
movdqa 1360(%rsp), %xmm7 #213.9 | |
movdqa 64(%rsp), %xmm10 #213.9 | |
movdqa %xmm7, %xmm11 #213.9 | |
movdqa 1408(%rsp), %xmm3 #213.9 | |
pand %xmm10, %xmm11 #213.9 | |
movdqa 1392(%rsp), %xmm14 #214.9 | |
movdqa %xmm3, %xmm9 #213.9 | |
movdqa 1344(%rsp), %xmm13 #213.9 | |
pxor %xmm11, %xmm9 #213.9 | |
movdqa %xmm14, %xmm5 #214.9 | |
movdqa %xmm14, %xmm8 #214.9 | |
movdqa %xmm14, %xmm15 #214.9 | |
movdqa %xmm14, %xmm6 #214.9 | |
movdqa 1424(%rsp), %xmm2 #213.9 | |
pxor %xmm9, %xmm13 #213.9 | |
psrld $6, %xmm5 #214.9 | |
pslld $26, %xmm8 #214.9 | |
psrld $11, %xmm15 #214.9 | |
pslld $21, %xmm6 #214.9 | |
movdqa %xmm14, %xmm4 #214.9 | |
movdqa %xmm14, %xmm1 #214.9 | |
movdqa 1296(%rsp), %xmm9 #214.9 | |
paddd %xmm13, %xmm2 #213.9 | |
por %xmm8, %xmm5 #214.9 | |
por %xmm6, %xmm15 #214.9 | |
movdqa 1328(%rsp), %xmm8 #214.9 | |
psrld $25, %xmm4 #214.9 | |
pslld $7, %xmm1 #214.9 | |
movdqa %xmm14, %xmm0 #214.9 | |
movdqa %xmm14, %xmm13 #214.9 | |
pxor %xmm15, %xmm5 #214.9 | |
movdqa 1376(%rsp), %xmm12 #213.9 | |
por %xmm1, %xmm4 #214.9 | |
pand %xmm8, %xmm0 #214.9 | |
pandn %xmm9, %xmm13 #214.9 | |
movdqa .L_2il0floatpacket.6760(%rip), %xmm11 #214.9 | |
paddd %xmm2, %xmm12 #213.9 | |
movdqa 48(%rsp), %xmm2 #214.9 | |
pxor %xmm4, %xmm5 #214.9 | |
pxor %xmm13, %xmm0 #214.9 | |
paddd %xmm5, %xmm2 #214.9 | |
paddd %xmm0, %xmm11 #214.9 | |
movdqa %xmm12, %xmm5 #214.9 | |
paddd %xmm11, %xmm2 #214.9 | |
movdqa %xmm12, %xmm15 #214.9 | |
movdqa 288(%rsp), %xmm11 #214.9 | |
psrld $2, %xmm5 #214.9 | |
movdqa %xmm11, %xmm13 #214.9 | |
pslld $30, %xmm15 #214.9 | |
movdqa %xmm12, %xmm6 #214.9 | |
movdqa %xmm12, %xmm4 #214.9 | |
paddd %xmm2, %xmm13 #214.9 | |
por %xmm15, %xmm5 #214.9 | |
psrld $13, %xmm6 #214.9 | |
pslld $19, %xmm4 #214.9 | |
movdqa %xmm12, %xmm2 #214.9 | |
movdqa %xmm12, %xmm15 #214.9 | |
por %xmm4, %xmm6 #214.9 | |
psrld $22, %xmm2 #214.9 | |
pslld $10, %xmm15 #214.9 | |
pxor %xmm6, %xmm5 #214.9 | |
por %xmm15, %xmm2 #214.9 | |
movdqa %xmm7, %xmm1 #214.9 | |
pxor %xmm2, %xmm5 #214.9 | |
pand %xmm12, %xmm1 #214.9 | |
movdqa 1312(%rsp), %xmm2 #214.9 | |
movdqa %xmm1, %xmm15 #214.9 | |
movdqa %xmm2, %xmm0 #214.9 | |
paddd %xmm13, %xmm10 #214.9 | |
pand %xmm12, %xmm0 #214.9 | |
movdqa %xmm10, %xmm6 #217.9 | |
pxor %xmm0, %xmm15 #214.9 | |
movdqa %xmm10, %xmm4 #217.9 | |
pxor %xmm15, %xmm3 #214.9 | |
movdqa %xmm10, %xmm15 #217.9 | |
paddd %xmm3, %xmm5 #214.9 | |
movdqa %xmm10, %xmm0 #217.9 | |
paddd %xmm5, %xmm13 #214.9 | |
psrld $6, %xmm15 #217.9 | |
pslld $26, %xmm6 #217.9 | |
psrld $11, %xmm4 #217.9 | |
pslld $21, %xmm0 #217.9 | |
movdqa %xmm10, %xmm3 #217.9 | |
movdqa %xmm10, %xmm5 #217.9 | |
por %xmm6, %xmm15 #217.9 | |
por %xmm0, %xmm4 #217.9 | |
psrld $25, %xmm3 #217.9 | |
pslld $7, %xmm5 #217.9 | |
pxor %xmm4, %xmm15 #217.9 | |
por %xmm5, %xmm3 #217.9 | |
movdqa %xmm14, %xmm0 #217.9 | |
pxor %xmm3, %xmm15 #217.9 | |
movdqa %xmm10, %xmm3 #217.9 | |
pand %xmm10, %xmm0 #217.9 | |
pandn %xmm8, %xmm3 #217.9 | |
pxor %xmm3, %xmm0 #217.9 | |
movdqa %xmm13, %xmm5 #217.9 | |
movdqa .L_2il0floatpacket.6761(%rip), %xmm3 #217.9 | |
movdqa %xmm13, %xmm6 #217.9 | |
paddd %xmm0, %xmm3 #217.9 | |
movdqa %xmm13, %xmm4 #217.9 | |
movdqa %xmm13, %xmm0 #217.9 | |
paddd %xmm15, %xmm9 #217.9 | |
psrld $2, %xmm5 #217.9 | |
pslld $30, %xmm6 #217.9 | |
psrld $13, %xmm4 #217.9 | |
pslld $19, %xmm0 #217.9 | |
paddd %xmm3, %xmm9 #217.9 | |
movdqa %xmm13, %xmm15 #217.9 | |
movdqa 576(%rsp), %xmm3 #217.9 | |
por %xmm6, %xmm5 #217.9 | |
por %xmm0, %xmm4 #217.9 | |
paddd %xmm9, %xmm3 #217.9 | |
pand %xmm12, %xmm15 #217.9 | |
pxor %xmm4, %xmm5 #217.9 | |
movdqa %xmm13, %xmm9 #217.9 | |
movdqa %xmm13, %xmm6 #217.9 | |
movdqa %xmm7, %xmm4 #217.9 | |
psrld $22, %xmm9 #217.9 | |
pslld $10, %xmm6 #217.9 | |
pand %xmm13, %xmm4 #217.9 | |
movdqa %xmm15, %xmm0 #217.9 | |
por %xmm6, %xmm9 #217.9 | |
pxor %xmm4, %xmm0 #217.9 | |
pxor %xmm9, %xmm5 #217.9 | |
pxor %xmm1, %xmm0 #217.9 | |
paddd %xmm3, %xmm2 #217.9 | |
paddd %xmm0, %xmm5 #217.9 | |
movdqa %xmm2, %xmm1 #219.9 | |
paddd %xmm5, %xmm3 #217.9 | |
movdqa %xmm2, %xmm9 #219.9 | |
movdqa %xmm2, %xmm5 #219.9 | |
movdqa %xmm2, %xmm6 #219.9 | |
psrld $6, %xmm1 #219.9 | |
pslld $26, %xmm9 #219.9 | |
psrld $11, %xmm5 #219.9 | |
pslld $21, %xmm6 #219.9 | |
movdqa %xmm2, %xmm4 #219.9 | |
movdqa %xmm2, %xmm0 #219.9 | |
por %xmm9, %xmm1 #219.9 | |
por %xmm6, %xmm5 #219.9 | |
psrld $25, %xmm4 #219.9 | |
pslld $7, %xmm0 #219.9 | |
pxor %xmm5, %xmm1 #219.9 | |
por %xmm0, %xmm4 #219.9 | |
pxor %xmm4, %xmm1 #219.9 | |
movdqa %xmm2, %xmm0 #219.9 | |
paddd %xmm1, %xmm8 #219.9 | |
movdqa %xmm2, %xmm1 #219.9 | |
pand %xmm10, %xmm0 #219.9 | |
pandn %xmm14, %xmm1 #219.9 | |
movdqa .L_2il0floatpacket.6762(%rip), %xmm9 #219.9 | |
pxor %xmm1, %xmm0 #219.9 | |
movdqa %xmm3, %xmm5 #219.9 | |
movdqa %xmm3, %xmm6 #219.9 | |
movdqa %xmm3, %xmm4 #219.9 | |
movdqa %xmm3, %xmm1 #219.9 | |
paddd %xmm0, %xmm9 #219.9 | |
movdqa %xmm8, %xmm0 #219.9 | |
psrld $2, %xmm5 #219.9 | |
pslld $30, %xmm6 #219.9 | |
psrld $13, %xmm4 #219.9 | |
pslld $19, %xmm1 #219.9 | |
movdqa 592(%rsp), %xmm8 #219.9 | |
paddd %xmm9, %xmm0 #219.9 | |
movdqa %xmm3, %xmm9 #219.9 | |
por %xmm6, %xmm5 #219.9 | |
por %xmm1, %xmm4 #219.9 | |
paddd %xmm0, %xmm8 #219.9 | |
pand %xmm13, %xmm9 #219.9 | |
pxor %xmm4, %xmm5 #219.9 | |
movdqa %xmm3, %xmm0 #219.9 | |
movdqa %xmm3, %xmm6 #219.9 | |
movdqa %xmm3, %xmm4 #219.9 | |
psrld $22, %xmm0 #219.9 | |
pslld $10, %xmm6 #219.9 | |
pand %xmm12, %xmm4 #219.9 | |
movdqa %xmm9, %xmm1 #219.9 | |
por %xmm6, %xmm0 #219.9 | |
pxor %xmm4, %xmm1 #219.9 | |
pxor %xmm0, %xmm5 #219.9 | |
pxor %xmm15, %xmm1 #219.9 | |
paddd %xmm8, %xmm7 #219.9 | |
movdqa (%rsp), %xmm15 #220.14 | |
paddd %xmm1, %xmm5 #219.9 | |
paddd %xmm5, %xmm8 #219.9 | |
movdqa %xmm15, %xmm6 #220.14 | |
movdqa %xmm15, %xmm4 #220.14 | |
movdqa %xmm15, %xmm0 #220.14 | |
movdqa %xmm15, %xmm5 #220.14 | |
psrld $7, %xmm6 #220.14 | |
pslld $25, %xmm4 #220.14 | |
psrld $18, %xmm0 #220.14 | |
pslld $14, %xmm5 #220.14 | |
por %xmm4, %xmm6 #220.14 | |
por %xmm5, %xmm0 #220.14 | |
movdqa %xmm15, %xmm4 #220.14 | |
pxor %xmm0, %xmm6 #220.14 | |
psrld $3, %xmm4 #220.14 | |
movdqa 96(%rsp), %xmm5 #220.14 | |
pxor %xmm4, %xmm6 #220.14 | |
movdqa 608(%rsp), %xmm1 #220.14 | |
paddd %xmm6, %xmm5 #220.14 | |
paddd %xmm5, %xmm1 #220.14 | |
movdqa %xmm7, %xmm6 #221.9 | |
movdqa %xmm7, %xmm0 #221.9 | |
movdqa %xmm7, %xmm5 #221.9 | |
movdqa %xmm7, %xmm4 #221.9 | |
psrld $6, %xmm6 #221.9 | |
pslld $26, %xmm0 #221.9 | |
psrld $11, %xmm5 #221.9 | |
pslld $21, %xmm4 #221.9 | |
por %xmm0, %xmm6 #221.9 | |
por %xmm4, %xmm5 #221.9 | |
movdqa %xmm7, %xmm0 #221.9 | |
pxor %xmm5, %xmm6 #221.9 | |
movdqa %xmm7, %xmm5 #221.9 | |
psrld $25, %xmm0 #221.9 | |
pslld $7, %xmm5 #221.9 | |
por %xmm5, %xmm0 #221.9 | |
movdqa %xmm7, %xmm5 #221.9 | |
movdqa %xmm7, %xmm4 #221.9 | |
pxor %xmm0, %xmm6 #221.9 | |
pand %xmm2, %xmm5 #221.9 | |
pandn %xmm10, %xmm4 #221.9 | |
paddd %xmm6, %xmm14 #221.9 | |
pxor %xmm4, %xmm5 #221.9 | |
movdqa %xmm8, %xmm4 #221.9 | |
movdqa %xmm8, %xmm6 #221.9 | |
movdqa .L_2il0floatpacket.6763(%rip), %xmm0 #221.9 | |
psrld $2, %xmm4 #221.9 | |
pslld $30, %xmm6 #221.9 | |
paddd %xmm5, %xmm0 #221.9 | |
por %xmm6, %xmm4 #221.9 | |
movdqa %xmm8, %xmm6 #221.9 | |
movdqa %xmm8, %xmm5 #221.9 | |
psrld $13, %xmm6 #221.9 | |
pslld $19, %xmm5 #221.9 | |
paddd %xmm0, %xmm14 #221.9 | |
por %xmm5, %xmm6 #221.9 | |
movdqa %xmm8, %xmm5 #221.9 | |
pxor %xmm6, %xmm4 #221.9 | |
movdqa %xmm8, %xmm6 #221.9 | |
movdqa %xmm8, %xmm0 #221.9 | |
psrld $22, %xmm5 #221.9 | |
pslld $10, %xmm6 #221.9 | |
pand %xmm3, %xmm0 #221.9 | |
por %xmm6, %xmm5 #221.9 | |
movdqa %xmm8, %xmm6 #221.9 | |
pxor %xmm5, %xmm4 #221.9 | |
pand %xmm13, %xmm6 #221.9 | |
movdqa %xmm0, %xmm5 #221.9 | |
paddd %xmm1, %xmm14 #221.9 | |
pxor %xmm6, %xmm5 #221.9 | |
paddd %xmm14, %xmm12 #221.9 | |
pxor %xmm9, %xmm5 #221.9 | |
movdqa %xmm12, %xmm6 #223.9 | |
movdqa 640(%rsp), %xmm9 #222.14 | |
paddd %xmm5, %xmm4 #221.9 | |
movdqa 624(%rsp), %xmm5 #222.14 | |
paddd %xmm15, %xmm9 #222.14 | |
paddd %xmm4, %xmm14 #221.9 | |
paddd %xmm9, %xmm5 #222.14 | |
movdqa %xmm12, %xmm15 #223.9 | |
movdqa %xmm12, %xmm4 #223.9 | |
movdqa %xmm12, %xmm9 #223.9 | |
psrld $6, %xmm15 #223.9 | |
pslld $26, %xmm6 #223.9 | |
psrld $11, %xmm4 #223.9 | |
pslld $21, %xmm9 #223.9 | |
por %xmm6, %xmm15 #223.9 | |
por %xmm9, %xmm4 #223.9 | |
movdqa %xmm12, %xmm6 #223.9 | |
movdqa %xmm12, %xmm9 #223.9 | |
psrld $25, %xmm6 #223.9 | |
pslld $7, %xmm9 #223.9 | |
pxor %xmm4, %xmm15 #223.9 | |
por %xmm9, %xmm6 #223.9 | |
movdqa %xmm12, %xmm9 #223.9 | |
pxor %xmm6, %xmm15 #223.9 | |
movdqa %xmm12, %xmm6 #223.9 | |
pand %xmm7, %xmm9 #223.9 | |
pandn %xmm2, %xmm6 #223.9 | |
movdqa .L_2il0floatpacket.6764(%rip), %xmm4 #223.9 | |
pxor %xmm6, %xmm9 #223.9 | |
paddd %xmm15, %xmm10 #223.9 | |
paddd %xmm9, %xmm4 #223.9 | |
movdqa %xmm14, %xmm9 #223.9 | |
movdqa %xmm14, %xmm15 #223.9 | |
psrld $2, %xmm9 #223.9 | |
pslld $30, %xmm15 #223.9 | |
por %xmm15, %xmm9 #223.9 | |
movdqa %xmm14, %xmm15 #223.9 | |
movdqa %xmm14, %xmm6 #223.9 | |
psrld $13, %xmm15 #223.9 | |
pslld $19, %xmm6 #223.9 | |
paddd %xmm4, %xmm10 #223.9 | |
por %xmm6, %xmm15 #223.9 | |
movdqa %xmm14, %xmm6 #223.9 | |
pxor %xmm15, %xmm9 #223.9 | |
movdqa %xmm14, %xmm15 #223.9 | |
movdqa %xmm14, %xmm4 #223.9 | |
psrld $22, %xmm15 #223.9 | |
pslld $10, %xmm6 #223.9 | |
pand %xmm8, %xmm4 #223.9 | |
por %xmm6, %xmm15 #223.9 | |
movdqa %xmm14, %xmm6 #223.9 | |
pxor %xmm15, %xmm9 #223.9 | |
pand %xmm3, %xmm6 #223.9 | |
movdqa %xmm4, %xmm15 #223.9 | |
paddd %xmm5, %xmm10 #223.9 | |
pxor %xmm6, %xmm15 #223.9 | |
paddd %xmm10, %xmm13 #223.9 | |
pxor %xmm0, %xmm15 #223.9 | |
movdqa %xmm1, %xmm0 #224.14 | |
paddd %xmm15, %xmm9 #223.9 | |
movdqa %xmm1, %xmm15 #224.14 | |
paddd %xmm9, %xmm10 #223.9 | |
movdqa %xmm1, %xmm9 #224.14 | |
movdqa %xmm1, %xmm6 #224.14 | |
psrld $17, %xmm9 #224.14 | |
pslld $15, %xmm0 #224.14 | |
psrld $19, %xmm15 #224.14 | |
pslld $13, %xmm6 #224.14 | |
por %xmm0, %xmm9 #224.14 | |
por %xmm6, %xmm15 #224.14 | |
movdqa %xmm13, %xmm6 #225.9 | |
movdqa %xmm1, 1440(%rsp) #220.14 | |
pxor %xmm15, %xmm9 #224.14 | |
psrld $10, %xmm1 #224.14 | |
movdqa %xmm13, %xmm0 #225.9 | |
movdqa 256(%rsp), %xmm15 #224.14 | |
pxor %xmm1, %xmm9 #224.14 | |
paddd %xmm9, %xmm15 #224.14 | |
movdqa %xmm13, %xmm1 #225.9 | |
movdqa 656(%rsp), %xmm9 #224.14 | |
psrld $6, %xmm6 #225.9 | |
paddd %xmm15, %xmm9 #224.14 | |
movdqa %xmm13, %xmm15 #225.9 | |
pslld $26, %xmm0 #225.9 | |
psrld $11, %xmm15 #225.9 | |
pslld $21, %xmm1 #225.9 | |
por %xmm0, %xmm6 #225.9 | |
por %xmm1, %xmm15 #225.9 | |
movdqa %xmm13, %xmm0 #225.9 | |
pxor %xmm15, %xmm6 #225.9 | |
movdqa %xmm13, %xmm15 #225.9 | |
psrld $25, %xmm0 #225.9 | |
pslld $7, %xmm15 #225.9 | |
por %xmm15, %xmm0 #225.9 | |
movdqa %xmm13, %xmm15 #225.9 | |
movdqa %xmm13, %xmm1 #225.9 | |
pand %xmm12, %xmm15 #225.9 | |
pandn %xmm7, %xmm1 #225.9 | |
pxor %xmm0, %xmm6 #225.9 | |
movdqa .L_2il0floatpacket.6765(%rip), %xmm0 #225.9 | |
pxor %xmm1, %xmm15 #225.9 | |
paddd %xmm6, %xmm2 #225.9 | |
paddd %xmm15, %xmm0 #225.9 | |
paddd %xmm0, %xmm2 #225.9 | |
movdqa %xmm10, %xmm0 #225.9 | |
movdqa %xmm10, %xmm1 #225.9 | |
psrld $2, %xmm0 #225.9 | |
pslld $30, %xmm1 #225.9 | |
movdqa %xmm10, %xmm15 #225.9 | |
por %xmm1, %xmm0 #225.9 | |
movdqa %xmm10, %xmm1 #225.9 | |
psrld $13, %xmm1 #225.9 | |
pslld $19, %xmm15 #225.9 | |
por %xmm15, %xmm1 #225.9 | |
movdqa %xmm10, %xmm15 #225.9 | |
pxor %xmm1, %xmm0 #225.9 | |
movdqa %xmm10, %xmm1 #225.9 | |
movdqa %xmm10, %xmm6 #225.9 | |
psrld $22, %xmm15 #225.9 | |
pslld $10, %xmm1 #225.9 | |
pand %xmm14, %xmm6 #225.9 | |
por %xmm1, %xmm15 #225.9 | |
movdqa %xmm10, %xmm1 #225.9 | |
pxor %xmm15, %xmm0 #225.9 | |
pand %xmm8, %xmm1 #225.9 | |
movdqa %xmm6, %xmm15 #225.9 | |
paddd %xmm9, %xmm2 #225.9 | |
pxor %xmm1, %xmm15 #225.9 | |
movdqa %xmm5, %xmm1 #226.14 | |
pxor %xmm4, %xmm15 #225.9 | |
movdqa %xmm5, %xmm4 #226.14 | |
paddd %xmm15, %xmm0 #225.9 | |
movdqa %xmm5, %xmm15 #226.14 | |
psrld $17, %xmm4 #226.14 | |
pslld $15, %xmm15 #226.14 | |
por %xmm15, %xmm4 #226.14 | |
movdqa %xmm5, %xmm15 #226.14 | |
psrld $19, %xmm1 #226.14 | |
pslld $13, %xmm15 #226.14 | |
por %xmm15, %xmm1 #226.14 | |
paddd %xmm2, %xmm3 #225.9 | |
movdqa %xmm5, 1456(%rsp) #222.14 | |
pxor %xmm1, %xmm4 #226.14 | |
psrld $10, %xmm5 #226.14 | |
paddd %xmm0, %xmm2 #225.9 | |
movdqa 272(%rsp), %xmm0 #226.14 | |
pxor %xmm5, %xmm4 #226.14 | |
paddd %xmm4, %xmm0 #226.14 | |
movdqa %xmm3, %xmm15 #227.9 | |
movdqa %xmm3, %xmm4 #227.9 | |
psrld $6, %xmm15 #227.9 | |
pslld $26, %xmm4 #227.9 | |
movdqa %xmm3, %xmm1 #227.9 | |
por %xmm4, %xmm15 #227.9 | |
movdqa %xmm3, %xmm4 #227.9 | |
movdqa 672(%rsp), %xmm5 #226.14 | |
psrld $11, %xmm1 #227.9 | |
pslld $21, %xmm4 #227.9 | |
paddd %xmm0, %xmm5 #226.14 | |
por %xmm4, %xmm1 #227.9 | |
movdqa %xmm3, %xmm0 #227.9 | |
movdqa %xmm3, %xmm4 #227.9 | |
psrld $25, %xmm0 #227.9 | |
pslld $7, %xmm4 #227.9 | |
pxor %xmm1, %xmm15 #227.9 | |
por %xmm4, %xmm0 #227.9 | |
movdqa %xmm3, %xmm4 #227.9 | |
movdqa %xmm3, %xmm1 #227.9 | |
pand %xmm13, %xmm4 #227.9 | |
pandn %xmm12, %xmm1 #227.9 | |
pxor %xmm0, %xmm15 #227.9 | |
movdqa .L_2il0floatpacket.6766(%rip), %xmm0 #227.9 | |
pxor %xmm1, %xmm4 #227.9 | |
paddd %xmm4, %xmm0 #227.9 | |
movdqa %xmm2, %xmm1 #227.9 | |
movdqa %xmm2, %xmm4 #227.9 | |
paddd %xmm15, %xmm7 #227.9 | |
psrld $2, %xmm1 #227.9 | |
pslld $30, %xmm4 #227.9 | |
paddd %xmm0, %xmm7 #227.9 | |
por %xmm4, %xmm1 #227.9 | |
movdqa %xmm2, %xmm4 #227.9 | |
movdqa %xmm2, %xmm0 #227.9 | |
psrld $13, %xmm4 #227.9 | |
pslld $19, %xmm0 #227.9 | |
por %xmm0, %xmm4 #227.9 | |
movdqa %xmm2, %xmm0 #227.9 | |
pxor %xmm4, %xmm1 #227.9 | |
movdqa %xmm2, %xmm4 #227.9 | |
movdqa %xmm2, %xmm15 #227.9 | |
psrld $22, %xmm4 #227.9 | |
pslld $10, %xmm0 #227.9 | |
pand %xmm10, %xmm15 #227.9 | |
por %xmm0, %xmm4 #227.9 | |
movdqa %xmm2, %xmm0 #227.9 | |
pxor %xmm4, %xmm1 #227.9 | |
pand %xmm14, %xmm0 #227.9 | |
movdqa %xmm15, %xmm4 #227.9 | |
paddd %xmm5, %xmm7 #227.9 | |
pxor %xmm0, %xmm4 #227.9 | |
paddd %xmm7, %xmm8 #227.9 | |
pxor %xmm6, %xmm4 #227.9 | |
movdqa %xmm9, %xmm6 #228.14 | |
paddd %xmm4, %xmm1 #227.9 | |
movdqa %xmm9, %xmm4 #228.14 | |
paddd %xmm1, %xmm7 #227.9 | |
movdqa %xmm9, %xmm0 #228.14 | |
movdqa %xmm9, %xmm1 #228.14 | |
psrld $17, %xmm4 #228.14 | |
pslld $15, %xmm6 #228.14 | |
psrld $19, %xmm0 #228.14 | |
pslld $13, %xmm1 #228.14 | |
por %xmm6, %xmm4 #228.14 | |
por %xmm1, %xmm0 #228.14 | |
movdqa %xmm8, %xmm6 #229.9 | |
movdqa %xmm9, 1472(%rsp) #224.14 | |
pxor %xmm0, %xmm4 #228.14 | |
psrld $10, %xmm9 #228.14 | |
movdqa %xmm8, %xmm0 #229.9 | |
pxor %xmm9, %xmm4 #228.14 | |
psrld $6, %xmm6 #229.9 | |
movdqa 688(%rsp), %xmm9 #228.14 | |
paddd %xmm4, %xmm11 #228.14 | |
pslld $26, %xmm0 #229.9 | |
movdqa %xmm8, %xmm1 #229.9 | |
movdqa %xmm8, %xmm4 #229.9 | |
paddd %xmm11, %xmm9 #228.14 | |
por %xmm0, %xmm6 #229.9 | |
psrld $11, %xmm1 #229.9 | |
pslld $21, %xmm4 #229.9 | |
movdqa %xmm8, %xmm11 #229.9 | |
movdqa %xmm8, %xmm0 #229.9 | |
por %xmm4, %xmm1 #229.9 | |
psrld $25, %xmm11 #229.9 | |
pslld $7, %xmm0 #229.9 | |
pxor %xmm1, %xmm6 #229.9 | |
por %xmm0, %xmm11 #229.9 | |
movdqa %xmm8, %xmm0 #229.9 | |
movdqa %xmm8, %xmm1 #229.9 | |
pand %xmm3, %xmm0 #229.9 | |
pandn %xmm13, %xmm1 #229.9 | |
pxor %xmm11, %xmm6 #229.9 | |
pxor %xmm1, %xmm0 #229.9 | |
movdqa .L_2il0floatpacket.6767(%rip), %xmm11 #229.9 | |
paddd %xmm6, %xmm12 #229.9 | |
paddd %xmm0, %xmm11 #229.9 | |
movdqa %xmm7, %xmm6 #229.9 | |
movdqa %xmm7, %xmm4 #229.9 | |
movdqa %xmm7, %xmm0 #229.9 | |
movdqa %xmm7, %xmm1 #229.9 | |
psrld $2, %xmm6 #229.9 | |
pslld $30, %xmm4 #229.9 | |
psrld $13, %xmm0 #229.9 | |
pslld $19, %xmm1 #229.9 | |
por %xmm4, %xmm6 #229.9 | |
por %xmm1, %xmm0 #229.9 | |
paddd %xmm11, %xmm12 #229.9 | |
movdqa %xmm7, %xmm11 #229.9 | |
pxor %xmm0, %xmm6 #229.9 | |
movdqa %xmm7, %xmm4 #229.9 | |
movdqa %xmm7, %xmm0 #229.9 | |
pand %xmm2, %xmm11 #229.9 | |
psrld $22, %xmm4 #229.9 | |
pslld $10, %xmm0 #229.9 | |
movdqa %xmm7, %xmm1 #229.9 | |
por %xmm0, %xmm4 #229.9 | |
pand %xmm10, %xmm1 #229.9 | |
movdqa %xmm11, %xmm0 #229.9 | |
pxor %xmm4, %xmm6 #229.9 | |
pxor %xmm1, %xmm0 #229.9 | |
movdqa %xmm5, %xmm1 #230.14 | |
pxor %xmm15, %xmm0 #229.9 | |
movdqa %xmm5, %xmm15 #230.14 | |
paddd %xmm0, %xmm6 #229.9 | |
movdqa %xmm5, %xmm0 #230.14 | |
movdqa %xmm5, %xmm4 #230.14 | |
psrld $17, %xmm15 #230.14 | |
pslld $15, %xmm0 #230.14 | |
psrld $19, %xmm1 #230.14 | |
pslld $13, %xmm4 #230.14 | |
paddd %xmm9, %xmm12 #229.9 | |
por %xmm0, %xmm15 #230.14 | |
por %xmm4, %xmm1 #230.14 | |
movdqa %xmm5, 1520(%rsp) #226.14 | |
paddd %xmm12, %xmm14 #229.9 | |
pxor %xmm1, %xmm15 #230.14 | |
psrld $10, %xmm5 #230.14 | |
paddd %xmm6, %xmm12 #229.9 | |
pxor %xmm5, %xmm15 #230.14 | |
movdqa 576(%rsp), %xmm6 #230.14 | |
movdqa %xmm14, %xmm0 #231.9 | |
movdqa %xmm14, %xmm1 #231.9 | |
paddd %xmm15, %xmm6 #230.14 | |
movdqa 704(%rsp), %xmm5 #230.14 | |
psrld $6, %xmm0 #231.9 | |
pslld $26, %xmm1 #231.9 | |
movdqa %xmm14, %xmm4 #231.9 | |
movdqa %xmm14, %xmm15 #231.9 | |
paddd %xmm6, %xmm5 #230.14 | |
por %xmm1, %xmm0 #231.9 | |
psrld $11, %xmm4 #231.9 | |
pslld $21, %xmm15 #231.9 | |
movdqa %xmm14, %xmm1 #231.9 | |
movdqa %xmm14, %xmm6 #231.9 | |
por %xmm15, %xmm4 #231.9 | |
psrld $25, %xmm1 #231.9 | |
pslld $7, %xmm6 #231.9 | |
pxor %xmm4, %xmm0 #231.9 | |
por %xmm6, %xmm1 #231.9 | |
pxor %xmm1, %xmm0 #231.9 | |
movdqa %xmm12, %xmm1 #231.9 | |
paddd %xmm0, %xmm13 #231.9 | |
movdqa %xmm14, %xmm0 #231.9 | |
movdqa %xmm14, 1584(%rsp) #229.9 | |
pand %xmm8, %xmm0 #231.9 | |
pandn %xmm3, %xmm14 #231.9 | |
movdqa %xmm12, %xmm4 #231.9 | |
movdqa %xmm3, 1488(%rsp) #225.9 | |
pxor %xmm14, %xmm0 #231.9 | |
movdqa .L_2il0floatpacket.6768(%rip), %xmm3 #231.9 | |
psrld $13, %xmm1 #231.9 | |
movdqa %xmm8, 1536(%rsp) #227.9 | |
paddd %xmm0, %xmm3 #231.9 | |
movdqa %xmm12, %xmm8 #231.9 | |
movdqa %xmm12, %xmm0 #231.9 | |
psrld $2, %xmm8 #231.9 | |
pslld $30, %xmm0 #231.9 | |
pslld $19, %xmm4 #231.9 | |
paddd %xmm3, %xmm13 #231.9 | |
movdqa %xmm12, %xmm3 #231.9 | |
por %xmm0, %xmm8 #231.9 | |
por %xmm4, %xmm1 #231.9 | |
paddd %xmm5, %xmm13 #231.9 | |
movdqa %xmm2, 1504(%rsp) #225.9 | |
pand %xmm7, %xmm3 #231.9 | |
movdqa %xmm7, 1552(%rsp) #227.9 | |
pxor %xmm1, %xmm8 #231.9 | |
movdqa %xmm12, 1600(%rsp) #229.9 | |
movdqa %xmm12, %xmm6 #231.9 | |
movdqa %xmm5, 1616(%rsp) #230.14 | |
movdqa %xmm12, %xmm5 #231.9 | |
pand %xmm2, %xmm12 #231.9 | |
movdqa %xmm9, %xmm2 #232.14 | |
movdqa %xmm9, %xmm7 #232.14 | |
movdqa %xmm9, %xmm0 #232.14 | |
movdqa %xmm9, %xmm1 #232.14 | |
psrld $22, %xmm5 #231.9 | |
pslld $10, %xmm6 #231.9 | |
psrld $17, %xmm2 #232.14 | |
pslld $15, %xmm7 #232.14 | |
psrld $19, %xmm0 #232.14 | |
pslld $13, %xmm1 #232.14 | |
por %xmm6, %xmm5 #231.9 | |
movdqa %xmm3, 1648(%rsp) #231.9 | |
pxor %xmm12, %xmm3 #231.9 | |
por %xmm7, %xmm2 #232.14 | |
por %xmm1, %xmm0 #232.14 | |
movdqa %xmm9, 1568(%rsp) #228.14 | |
paddd %xmm13, %xmm10 #231.9 | |
pxor %xmm5, %xmm8 #231.9 | |
pxor %xmm11, %xmm3 #231.9 | |
pxor %xmm0, %xmm2 #232.14 | |
psrld $10, %xmm9 #232.14 | |
movdqa 592(%rsp), %xmm0 #232.14 | |
paddd %xmm3, %xmm8 #231.9 | |
pxor %xmm9, %xmm2 #232.14 | |
movdqa %xmm10, %xmm3 #233.9 | |
movdqa 720(%rsp), %xmm1 #232.14 | |
paddd %xmm2, %xmm0 #232.14 | |
movdqa %xmm10, 1632(%rsp) #231.9 | |
psrld $6, %xmm3 #233.9 | |
pslld $26, %xmm10 #233.9 | |
paddd %xmm8, %xmm13 #231.9 | |
paddd %xmm0, %xmm1 #232.14 | |
por %xmm10, %xmm3 #233.9 | |
movdqa %xmm13, 1664(%rsp) #231.9 | |
movdqa %xmm1, 1680(%rsp) #232.14 | |
movdqa %xmm3, 1696(%rsp) #233.9 | |
# LOE | |
..B2.20: # Preds ..B2.21 | |
movdqa 1632(%rsp), %xmm11 #233.9 | |
movdqa %xmm11, %xmm12 #233.9 | |
movdqa %xmm11, %xmm6 #233.9 | |
psrld $11, %xmm12 #233.9 | |
pslld $21, %xmm6 #233.9 | |
movdqa %xmm11, %xmm1 #233.9 | |
movdqa %xmm11, %xmm14 #233.9 | |
movdqa 1696(%rsp), %xmm4 #233.9 | |
por %xmm6, %xmm12 #233.9 | |
psrld $25, %xmm1 #233.9 | |
pslld $7, %xmm14 #233.9 | |
pxor %xmm12, %xmm4 #233.9 | |
por %xmm14, %xmm1 #233.9 | |
movdqa 1488(%rsp), %xmm3 #233.9 | |
pxor %xmm1, %xmm4 #233.9 | |
movdqa 1536(%rsp), %xmm8 #233.9 | |
paddd %xmm4, %xmm3 #233.9 | |
movdqa 1584(%rsp), %xmm4 #233.9 | |
movdqa %xmm11, %xmm0 #233.9 | |
movdqa %xmm11, %xmm5 #233.9 | |
pand %xmm4, %xmm0 #233.9 | |
pandn %xmm8, %xmm5 #233.9 | |
movdqa 1664(%rsp), %xmm12 #233.9 | |
pxor %xmm5, %xmm0 #233.9 | |
movdqa .L_2il0floatpacket.6769(%rip), %xmm7 #233.9 | |
movdqa %xmm12, %xmm10 #233.9 | |
movdqa 1680(%rsp), %xmm15 #233.9 | |
paddd %xmm0, %xmm7 #233.9 | |
movdqa %xmm12, %xmm9 #233.9 | |
movdqa %xmm12, %xmm14 #233.9 | |
movdqa %xmm12, %xmm0 #233.9 | |
paddd %xmm7, %xmm3 #233.9 | |
movdqa %xmm15, %xmm2 #233.9 | |
psrld $2, %xmm10 #233.9 | |
pslld $30, %xmm9 #233.9 | |
psrld $13, %xmm14 #233.9 | |
pslld $19, %xmm0 #233.9 | |
paddd %xmm3, %xmm2 #233.9 | |
movdqa 1600(%rsp), %xmm6 #233.9 | |
movdqa %xmm12, %xmm13 #233.9 | |
por %xmm9, %xmm10 #233.9 | |
por %xmm0, %xmm14 #233.9 | |
movdqa %xmm12, %xmm5 #233.9 | |
movdqa %xmm12, %xmm3 #233.9 | |
movdqa 1552(%rsp), %xmm7 #233.9 | |
pand %xmm6, %xmm13 #233.9 | |
pxor %xmm14, %xmm10 #233.9 | |
psrld $22, %xmm5 #233.9 | |
pslld $10, %xmm3 #233.9 | |
movdqa %xmm12, %xmm14 #233.9 | |
por %xmm3, %xmm5 #233.9 | |
pand %xmm7, %xmm14 #233.9 | |
movdqa %xmm13, %xmm0 #233.9 | |
pxor %xmm5, %xmm10 #233.9 | |
movdqa 1648(%rsp), %xmm9 #233.9 | |
pxor %xmm14, %xmm0 #233.9 | |
movdqa 1616(%rsp), %xmm5 #234.14 | |
pxor %xmm0, %xmm9 #233.9 | |
movdqa %xmm5, %xmm14 #234.14 | |
movdqa %xmm5, %xmm0 #234.14 | |
psrld $17, %xmm14 #234.14 | |
pslld $15, %xmm0 #234.14 | |
por %xmm0, %xmm14 #234.14 | |
movdqa %xmm5, %xmm3 #234.14 | |
movdqa %xmm5, %xmm0 #234.14 | |
paddd %xmm9, %xmm10 #233.9 | |
movdqa 1504(%rsp), %xmm1 #233.9 | |
psrld $19, %xmm3 #234.14 | |
pslld $13, %xmm0 #234.14 | |
paddd %xmm2, %xmm1 #233.9 | |
paddd %xmm10, %xmm2 #233.9 | |
por %xmm0, %xmm3 #234.14 | |
movdqa %xmm5, %xmm10 #234.14 | |
pxor %xmm3, %xmm14 #234.14 | |
psrld $10, %xmm10 #234.14 | |
movdqa %xmm1, %xmm0 #235.9 | |
movdqa 1440(%rsp), %xmm9 #234.14 | |
pxor %xmm10, %xmm14 #234.14 | |
paddd %xmm14, %xmm9 #234.14 | |
movdqa %xmm1, %xmm14 #235.9 | |
psrld $6, %xmm14 #235.9 | |
pslld $26, %xmm0 #235.9 | |
por %xmm0, %xmm14 #235.9 | |
movdqa %xmm1, %xmm3 #235.9 | |
movdqa %xmm1, %xmm0 #235.9 | |
psrld $11, %xmm3 #235.9 | |
movdqa 736(%rsp), %xmm10 #234.14 | |
pslld $21, %xmm0 #235.9 | |
paddd %xmm9, %xmm10 #234.14 | |
por %xmm0, %xmm3 #235.9 | |
movdqa %xmm1, %xmm9 #235.9 | |
movdqa %xmm1, %xmm0 #235.9 | |
psrld $25, %xmm9 #235.9 | |
pslld $7, %xmm0 #235.9 | |
pxor %xmm3, %xmm14 #235.9 | |
por %xmm0, %xmm9 #235.9 | |
movdqa %xmm11, %xmm0 #235.9 | |
movdqa %xmm1, %xmm3 #235.9 | |
pxor %xmm9, %xmm14 #235.9 | |
pand %xmm1, %xmm0 #235.9 | |
pandn %xmm4, %xmm3 #235.9 | |
paddd %xmm14, %xmm8 #235.9 | |
pxor %xmm3, %xmm0 #235.9 | |
movdqa %xmm2, %xmm3 #235.9 | |
movdqa %xmm2, %xmm14 #235.9 | |
psrld $2, %xmm3 #235.9 | |
movdqa .L_2il0floatpacket.6770(%rip), %xmm9 #235.9 | |
pslld $30, %xmm14 #235.9 | |
paddd %xmm0, %xmm9 #235.9 | |
por %xmm14, %xmm3 #235.9 | |
movdqa %xmm2, %xmm14 #235.9 | |
movdqa %xmm2, %xmm0 #235.9 | |
psrld $13, %xmm14 #235.9 | |
pslld $19, %xmm0 #235.9 | |
por %xmm0, %xmm14 #235.9 | |
movdqa %xmm2, %xmm0 #235.9 | |
pxor %xmm14, %xmm3 #235.9 | |
movdqa %xmm2, %xmm14 #235.9 | |
paddd %xmm9, %xmm8 #235.9 | |
movdqa %xmm12, %xmm9 #235.9 | |
psrld $22, %xmm0 #235.9 | |
pslld $10, %xmm14 #235.9 | |
pand %xmm2, %xmm9 #235.9 | |
por %xmm14, %xmm0 #235.9 | |
movdqa %xmm6, %xmm14 #235.9 | |
pxor %xmm0, %xmm3 #235.9 | |
pand %xmm2, %xmm14 #235.9 | |
movdqa %xmm9, %xmm0 #235.9 | |
pxor %xmm14, %xmm0 #235.9 | |
paddd %xmm10, %xmm8 #235.9 | |
pxor %xmm13, %xmm0 #235.9 | |
paddd %xmm8, %xmm7 #235.9 | |
paddd %xmm0, %xmm3 #235.9 | |
movdqa %xmm15, %xmm0 #236.15 | |
paddd %xmm3, %xmm8 #235.9 | |
movdqa %xmm15, %xmm13 #236.15 | |
movdqa %xmm15, %xmm14 #236.15 | |
movdqa %xmm15, %xmm3 #236.15 | |
psrld $17, %xmm0 #236.15 | |
pslld $15, %xmm13 #236.15 | |
psrld $19, %xmm14 #236.15 | |
pslld $13, %xmm3 #236.15 | |
por %xmm13, %xmm0 #236.15 | |
por %xmm3, %xmm14 #236.15 | |
pxor %xmm14, %xmm0 #236.15 | |
psrld $10, %xmm15 #236.15 | |
movdqa 1456(%rsp), %xmm14 #236.15 | |
pxor %xmm15, %xmm0 #236.15 | |
paddd %xmm0, %xmm14 #236.15 | |
movdqa %xmm7, %xmm13 #237.9 | |
movdqa 752(%rsp), %xmm0 #236.15 | |
movdqa %xmm7, %xmm3 #237.9 | |
paddd %xmm14, %xmm0 #236.15 | |
movdqa %xmm7, %xmm14 #237.9 | |
movdqa %xmm7, %xmm15 #237.9 | |
psrld $6, %xmm13 #237.9 | |
pslld $26, %xmm3 #237.9 | |
psrld $11, %xmm14 #237.9 | |
pslld $21, %xmm15 #237.9 | |
por %xmm3, %xmm13 #237.9 | |
por %xmm15, %xmm14 #237.9 | |
movdqa %xmm7, %xmm3 #237.9 | |
pxor %xmm14, %xmm13 #237.9 | |
movdqa %xmm7, %xmm14 #237.9 | |
psrld $25, %xmm3 #237.9 | |
pslld $7, %xmm14 #237.9 | |
por %xmm14, %xmm3 #237.9 | |
movdqa %xmm7, %xmm14 #237.9 | |
pxor %xmm3, %xmm13 #237.9 | |
movdqa %xmm7, %xmm3 #237.9 | |
pand %xmm1, %xmm14 #237.9 | |
pandn %xmm11, %xmm3 #237.9 | |
movdqa .L_2il0floatpacket.6771(%rip), %xmm15 #237.9 | |
pxor %xmm3, %xmm14 #237.9 | |
paddd %xmm13, %xmm4 #237.9 | |
paddd %xmm14, %xmm15 #237.9 | |
paddd %xmm15, %xmm4 #237.9 | |
movdqa %xmm8, %xmm15 #237.9 | |
movdqa %xmm8, %xmm3 #237.9 | |
psrld $2, %xmm15 #237.9 | |
pslld $30, %xmm3 #237.9 | |
movdqa %xmm8, %xmm14 #237.9 | |
por %xmm3, %xmm15 #237.9 | |
movdqa %xmm8, %xmm3 #237.9 | |
psrld $13, %xmm14 #237.9 | |
pslld $19, %xmm3 #237.9 | |
por %xmm3, %xmm14 #237.9 | |
movdqa %xmm8, %xmm3 #237.9 | |
pxor %xmm14, %xmm15 #237.9 | |
movdqa %xmm8, %xmm14 #237.9 | |
movdqa %xmm8, %xmm13 #237.9 | |
psrld $22, %xmm14 #237.9 | |
pslld $10, %xmm3 #237.9 | |
pand %xmm2, %xmm13 #237.9 | |
por %xmm3, %xmm14 #237.9 | |
movdqa %xmm12, %xmm3 #237.9 | |
pxor %xmm14, %xmm15 #237.9 | |
pand %xmm8, %xmm3 #237.9 | |
movdqa %xmm13, %xmm14 #237.9 | |
paddd %xmm0, %xmm4 #237.9 | |
pxor %xmm3, %xmm14 #237.9 | |
paddd %xmm4, %xmm6 #237.9 | |
pxor %xmm9, %xmm14 #237.9 | |
movdqa %xmm10, %xmm3 #238.15 | |
paddd %xmm14, %xmm15 #237.9 | |
movdqa %xmm10, %xmm9 #238.15 | |
paddd %xmm15, %xmm4 #237.9 | |
movdqa %xmm10, %xmm14 #238.15 | |
movdqa %xmm10, %xmm15 #238.15 | |
psrld $17, %xmm3 #238.15 | |
pslld $15, %xmm9 #238.15 | |
psrld $19, %xmm14 #238.15 | |
pslld $13, %xmm15 #238.15 | |
por %xmm9, %xmm3 #238.15 | |
por %xmm15, %xmm14 #238.15 | |
movdqa %xmm6, %xmm15 #239.9 | |
movdqa %xmm10, 1712(%rsp) #234.14 | |
pxor %xmm14, %xmm3 #238.15 | |
psrld $10, %xmm10 #238.15 | |
pslld $26, %xmm15 #239.9 | |
movdqa 1472(%rsp), %xmm14 #238.15 | |
pxor %xmm10, %xmm3 #238.15 | |
paddd %xmm3, %xmm14 #238.15 | |
movdqa %xmm6, %xmm9 #239.9 | |
movdqa 16(%rsp), %xmm3 #238.15 | |
psrld $11, %xmm9 #239.9 | |
paddd %xmm14, %xmm3 #238.15 | |
movdqa %xmm6, %xmm14 #239.9 | |
psrld $6, %xmm14 #239.9 | |
movdqa %xmm6, %xmm10 #239.9 | |
por %xmm15, %xmm14 #239.9 | |
movdqa %xmm6, %xmm15 #239.9 | |
pslld $21, %xmm15 #239.9 | |
psrld $25, %xmm10 #239.9 | |
por %xmm15, %xmm9 #239.9 | |
movdqa %xmm6, %xmm15 #239.9 | |
pslld $7, %xmm15 #239.9 | |
pxor %xmm9, %xmm14 #239.9 | |
por %xmm15, %xmm10 #239.9 | |
movdqa %xmm6, %xmm15 #239.9 | |
pxor %xmm10, %xmm14 #239.9 | |
movdqa %xmm6, %xmm10 #239.9 | |
pand %xmm7, %xmm15 #239.9 | |
pandn %xmm1, %xmm10 #239.9 | |
movdqa .L_2il0floatpacket.6772(%rip), %xmm9 #239.9 | |
pxor %xmm10, %xmm15 #239.9 | |
paddd %xmm14, %xmm11 #239.9 | |
paddd %xmm15, %xmm9 #239.9 | |
paddd %xmm9, %xmm11 #239.9 | |
movdqa %xmm4, %xmm9 #239.9 | |
movdqa %xmm4, %xmm10 #239.9 | |
psrld $2, %xmm9 #239.9 | |
pslld $30, %xmm10 #239.9 | |
movdqa %xmm4, %xmm15 #239.9 | |
por %xmm10, %xmm9 #239.9 | |
movdqa %xmm4, %xmm10 #239.9 | |
psrld $13, %xmm10 #239.9 | |
pslld $19, %xmm15 #239.9 | |
por %xmm15, %xmm10 #239.9 | |
movdqa %xmm4, %xmm15 #239.9 | |
pxor %xmm10, %xmm9 #239.9 | |
movdqa %xmm4, %xmm10 #239.9 | |
movdqa %xmm4, %xmm14 #239.9 | |
psrld $22, %xmm15 #239.9 | |
pslld $10, %xmm10 #239.9 | |
pand %xmm8, %xmm14 #239.9 | |
por %xmm10, %xmm15 #239.9 | |
movdqa %xmm4, %xmm10 #239.9 | |
pxor %xmm15, %xmm9 #239.9 | |
pand %xmm2, %xmm10 #239.9 | |
movdqa %xmm14, %xmm15 #239.9 | |
paddd %xmm3, %xmm11 #239.9 | |
pxor %xmm10, %xmm15 #239.9 | |
paddd %xmm11, %xmm12 #239.9 | |
pxor %xmm13, %xmm15 #239.9 | |
movdqa %xmm0, %xmm10 #240.15 | |
paddd %xmm15, %xmm9 #239.9 | |
movdqa %xmm0, %xmm13 #240.15 | |
paddd %xmm9, %xmm11 #239.9 | |
movdqa %xmm0, %xmm15 #240.15 | |
movdqa %xmm0, %xmm9 #240.15 | |
psrld $17, %xmm10 #240.15 | |
pslld $15, %xmm13 #240.15 | |
psrld $19, %xmm15 #240.15 | |
pslld $13, %xmm9 #240.15 | |
por %xmm13, %xmm10 #240.15 | |
por %xmm9, %xmm15 #240.15 | |
movdqa %xmm12, %xmm13 #241.9 | |
movdqa %xmm0, 1728(%rsp) #236.15 | |
pxor %xmm15, %xmm10 #240.15 | |
psrld $10, %xmm0 #240.15 | |
movdqa %xmm12, %xmm9 #241.9 | |
movdqa 1520(%rsp), %xmm15 #240.15 | |
pxor %xmm0, %xmm10 #240.15 | |
movdqa 32(%rsp), %xmm0 #240.15 | |
paddd %xmm10, %xmm15 #240.15 | |
paddd %xmm15, %xmm0 #240.15 | |
movdqa %xmm12, %xmm15 #241.9 | |
movdqa %xmm12, %xmm10 #241.9 | |
psrld $6, %xmm15 #241.9 | |
pslld $26, %xmm13 #241.9 | |
psrld $11, %xmm9 #241.9 | |
pslld $21, %xmm10 #241.9 | |
por %xmm13, %xmm15 #241.9 | |
por %xmm10, %xmm9 #241.9 | |
movdqa %xmm12, %xmm13 #241.9 | |
movdqa %xmm12, %xmm10 #241.9 | |
psrld $25, %xmm13 #241.9 | |
pslld $7, %xmm10 #241.9 | |
pxor %xmm9, %xmm15 #241.9 | |
por %xmm10, %xmm13 #241.9 | |
movdqa %xmm12, %xmm10 #241.9 | |
pxor %xmm13, %xmm15 #241.9 | |
movdqa %xmm12, %xmm13 #241.9 | |
pand %xmm6, %xmm10 #241.9 | |
pandn %xmm7, %xmm13 #241.9 | |
movdqa .L_2il0floatpacket.6773(%rip), %xmm9 #241.9 | |
pxor %xmm13, %xmm10 #241.9 | |
paddd %xmm10, %xmm9 #241.9 | |
movdqa %xmm11, %xmm10 #241.9 | |
movdqa %xmm11, %xmm13 #241.9 | |
paddd %xmm15, %xmm1 #241.9 | |
psrld $2, %xmm10 #241.9 | |
pslld $30, %xmm13 #241.9 | |
paddd %xmm9, %xmm1 #241.9 | |
por %xmm13, %xmm10 #241.9 | |
movdqa %xmm11, %xmm13 #241.9 | |
movdqa %xmm11, %xmm9 #241.9 | |
psrld $13, %xmm13 #241.9 | |
pslld $19, %xmm9 #241.9 | |
por %xmm9, %xmm13 #241.9 | |
movdqa %xmm11, %xmm9 #241.9 | |
pxor %xmm13, %xmm10 #241.9 | |
movdqa %xmm11, %xmm13 #241.9 | |
movdqa %xmm11, %xmm15 #241.9 | |
psrld $22, %xmm13 #241.9 | |
pslld $10, %xmm9 #241.9 | |
pand %xmm4, %xmm15 #241.9 | |
por %xmm9, %xmm13 #241.9 | |
movdqa %xmm11, %xmm9 #241.9 | |
pxor %xmm13, %xmm10 #241.9 | |
pand %xmm8, %xmm9 #241.9 | |
movdqa %xmm15, %xmm13 #241.9 | |
paddd %xmm0, %xmm1 #241.9 | |
pxor %xmm9, %xmm13 #241.9 | |
paddd %xmm1, %xmm2 #241.9 | |
pxor %xmm14, %xmm13 #241.9 | |
movdqa %xmm3, %xmm14 #242.15 | |
paddd %xmm13, %xmm10 #241.9 | |
movdqa %xmm3, %xmm13 #242.15 | |
paddd %xmm10, %xmm1 #241.9 | |
movdqa %xmm3, %xmm9 #242.15 | |
movdqa %xmm3, %xmm10 #242.15 | |
psrld $17, %xmm14 #242.15 | |
pslld $15, %xmm13 #242.15 | |
psrld $19, %xmm9 #242.15 | |
pslld $13, %xmm10 #242.15 | |
por %xmm13, %xmm14 #242.15 | |
por %xmm10, %xmm9 #242.15 | |
movdqa %xmm2, %xmm13 #243.9 | |
movdqa %xmm3, 1744(%rsp) #238.15 | |
pxor %xmm9, %xmm14 #242.15 | |
psrld $10, %xmm3 #242.15 | |
movdqa %xmm2, %xmm9 #243.9 | |
movdqa 1568(%rsp), %xmm10 #242.15 | |
pxor %xmm3, %xmm14 #242.15 | |
paddd %xmm14, %xmm10 #242.15 | |
movdqa %xmm2, %xmm3 #243.9 | |
movdqa 768(%rsp), %xmm14 #242.15 | |
psrld $6, %xmm3 #243.9 | |
paddd %xmm10, %xmm14 #242.15 | |
movdqa %xmm2, %xmm10 #243.9 | |
pslld $26, %xmm13 #243.9 | |
psrld $11, %xmm10 #243.9 | |
pslld $21, %xmm9 #243.9 | |
por %xmm13, %xmm3 #243.9 | |
por %xmm9, %xmm10 #243.9 | |
movdqa %xmm2, %xmm13 #243.9 | |
pxor %xmm10, %xmm3 #243.9 | |
movdqa %xmm2, %xmm10 #243.9 | |
psrld $25, %xmm13 #243.9 | |
pslld $7, %xmm10 #243.9 | |
por %xmm10, %xmm13 #243.9 | |
movdqa %xmm2, %xmm10 #243.9 | |
pxor %xmm13, %xmm3 #243.9 | |
movdqa %xmm2, %xmm13 #243.9 | |
pand %xmm12, %xmm10 #243.9 | |
pandn %xmm6, %xmm13 #243.9 | |
movdqa .L_2il0floatpacket.6774(%rip), %xmm9 #243.9 | |
pxor %xmm13, %xmm10 #243.9 | |
paddd %xmm10, %xmm9 #243.9 | |
movdqa %xmm1, %xmm13 #243.9 | |
movdqa %xmm1, %xmm10 #243.9 | |
paddd %xmm3, %xmm7 #243.9 | |
psrld $2, %xmm13 #243.9 | |
pslld $30, %xmm10 #243.9 | |
paddd %xmm9, %xmm7 #243.9 | |
por %xmm10, %xmm13 #243.9 | |
movdqa %xmm1, %xmm9 #243.9 | |
movdqa %xmm1, %xmm10 #243.9 | |
psrld $13, %xmm9 #243.9 | |
pslld $19, %xmm10 #243.9 | |
por %xmm10, %xmm9 #243.9 | |
movdqa %xmm1, %xmm10 #243.9 | |
pxor %xmm9, %xmm13 #243.9 | |
movdqa %xmm1, %xmm9 #243.9 | |
movdqa %xmm1, %xmm3 #243.9 | |
psrld $22, %xmm10 #243.9 | |
pslld $10, %xmm9 #243.9 | |
pand %xmm11, %xmm3 #243.9 | |
por %xmm9, %xmm10 #243.9 | |
movdqa %xmm1, %xmm9 #243.9 | |
pxor %xmm10, %xmm13 #243.9 | |
pand %xmm4, %xmm9 #243.9 | |
movdqa %xmm3, %xmm10 #243.9 | |
paddd %xmm14, %xmm7 #243.9 | |
pxor %xmm9, %xmm10 #243.9 | |
paddd %xmm7, %xmm8 #243.9 | |
pxor %xmm15, %xmm10 #243.9 | |
movdqa %xmm0, %xmm15 #244.15 | |
paddd %xmm10, %xmm13 #243.9 | |
movdqa %xmm0, %xmm10 #244.15 | |
paddd %xmm13, %xmm7 #243.9 | |
movdqa %xmm0, %xmm13 #244.15 | |
movdqa %xmm0, %xmm9 #244.15 | |
psrld $17, %xmm13 #244.15 | |
pslld $15, %xmm15 #244.15 | |
psrld $19, %xmm10 #244.15 | |
pslld $13, %xmm9 #244.15 | |
por %xmm15, %xmm13 #244.15 | |
por %xmm9, %xmm10 #244.15 | |
movdqa %xmm8, %xmm15 #245.9 | |
movdqa %xmm0, 1760(%rsp) #240.15 | |
pxor %xmm10, %xmm13 #244.15 | |
psrld $10, %xmm0 #244.15 | |
movdqa %xmm8, %xmm10 #245.9 | |
pxor %xmm0, %xmm13 #244.15 | |
psrld $6, %xmm15 #245.9 | |
movdqa 784(%rsp), %xmm0 #244.15 | |
paddd %xmm13, %xmm5 #244.15 | |
pslld $26, %xmm10 #245.9 | |
movdqa %xmm8, %xmm9 #245.9 | |
movdqa %xmm8, %xmm13 #245.9 | |
paddd %xmm5, %xmm0 #244.15 | |
por %xmm10, %xmm15 #245.9 | |
psrld $11, %xmm9 #245.9 | |
pslld $21, %xmm13 #245.9 | |
movdqa %xmm8, %xmm5 #245.9 | |
movdqa %xmm8, %xmm10 #245.9 | |
por %xmm13, %xmm9 #245.9 | |
psrld $25, %xmm5 #245.9 | |
pslld $7, %xmm10 #245.9 | |
pxor %xmm9, %xmm15 #245.9 | |
por %xmm10, %xmm5 #245.9 | |
pxor %xmm5, %xmm15 #245.9 | |
movdqa %xmm8, %xmm5 #245.9 | |
movdqa %xmm8, %xmm9 #245.9 | |
pand %xmm2, %xmm5 #245.9 | |
pandn %xmm12, %xmm9 #245.9 | |
paddd %xmm15, %xmm6 #245.9 | |
movdqa .L_2il0floatpacket.6775(%rip), %xmm15 #245.9 | |
pxor %xmm9, %xmm5 #245.9 | |
paddd %xmm5, %xmm15 #245.9 | |
movdqa %xmm7, %xmm10 #245.9 | |
paddd %xmm15, %xmm6 #245.9 | |
movdqa %xmm7, %xmm13 #245.9 | |
movdqa %xmm7, %xmm15 #245.9 | |
movdqa %xmm7, %xmm9 #245.9 | |
psrld $2, %xmm10 #245.9 | |
pslld $30, %xmm13 #245.9 | |
psrld $13, %xmm15 #245.9 | |
pslld $19, %xmm9 #245.9 | |
por %xmm13, %xmm10 #245.9 | |
por %xmm9, %xmm15 #245.9 | |
movdqa %xmm7, %xmm5 #245.9 | |
pxor %xmm15, %xmm10 #245.9 | |
movdqa %xmm7, %xmm13 #245.9 | |
movdqa %xmm7, %xmm15 #245.9 | |
pand %xmm1, %xmm5 #245.9 | |
psrld $22, %xmm13 #245.9 | |
pslld $10, %xmm15 #245.9 | |
movdqa %xmm7, %xmm9 #245.9 | |
por %xmm15, %xmm13 #245.9 | |
pand %xmm11, %xmm9 #245.9 | |
movdqa %xmm5, %xmm15 #245.9 | |
pxor %xmm13, %xmm10 #245.9 | |
pxor %xmm9, %xmm15 #245.9 | |
paddd %xmm0, %xmm6 #245.9 | |
pxor %xmm3, %xmm15 #245.9 | |
paddd %xmm6, %xmm4 #245.9 | |
paddd %xmm15, %xmm10 #245.9 | |
movdqa %xmm14, %xmm3 #246.15 | |
paddd %xmm10, %xmm6 #245.9 | |
movdqa %xmm14, %xmm15 #246.15 | |
movdqa %xmm14, %xmm9 #246.15 | |
movdqa %xmm14, %xmm10 #246.15 | |
psrld $17, %xmm3 #246.15 | |
pslld $15, %xmm15 #246.15 | |
psrld $19, %xmm9 #246.15 | |
pslld $13, %xmm10 #246.15 | |
por %xmm15, %xmm3 #246.15 | |
por %xmm10, %xmm9 #246.15 | |
movdqa %xmm14, 1776(%rsp) #242.15 | |
pxor %xmm9, %xmm3 #246.15 | |
psrld $10, %xmm14 #246.15 | |
movdqa %xmm4, %xmm10 #247.9 | |
movdqa 1680(%rsp), %xmm9 #246.15 | |
pxor %xmm14, %xmm3 #246.15 | |
paddd %xmm3, %xmm9 #246.15 | |
movdqa %xmm4, %xmm13 #247.9 | |
movdqa %xmm4, %xmm14 #247.9 | |
movdqa %xmm4, %xmm3 #247.9 | |
psrld $6, %xmm10 #247.9 | |
pslld $26, %xmm13 #247.9 | |
psrld $11, %xmm14 #247.9 | |
pslld $21, %xmm3 #247.9 | |
movdqa 800(%rsp), %xmm15 #246.15 | |
por %xmm13, %xmm10 #247.9 | |
por %xmm3, %xmm14 #247.9 | |
paddd %xmm9, %xmm15 #246.15 | |
pxor %xmm14, %xmm10 #247.9 | |
movdqa %xmm4, %xmm9 #247.9 | |
movdqa %xmm4, %xmm14 #247.9 | |
psrld $25, %xmm9 #247.9 | |
pslld $7, %xmm14 #247.9 | |
movdqa %xmm4, %xmm3 #247.9 | |
por %xmm14, %xmm9 #247.9 | |
pand %xmm8, %xmm3 #247.9 | |
pxor %xmm9, %xmm10 #247.9 | |
movdqa %xmm4, %xmm9 #247.9 | |
pandn %xmm2, %xmm9 #247.9 | |
paddd %xmm10, %xmm12 #247.9 | |
movdqa .L_2il0floatpacket.6776(%rip), %xmm14 #247.9 | |
pxor %xmm9, %xmm3 #247.9 | |
paddd %xmm3, %xmm14 #247.9 | |
movdqa %xmm6, %xmm3 #247.9 | |
paddd %xmm14, %xmm12 #247.9 | |
pand %xmm7, %xmm3 #247.9 | |
movdqa %xmm7, 1808(%rsp) #243.9 | |
paddd %xmm15, %xmm12 #247.9 | |
movdqa %xmm15, 1872(%rsp) #246.15 | |
movdqa %xmm6, %xmm7 #247.9 | |
movdqa %xmm6, %xmm15 #247.9 | |
movdqa %xmm6, %xmm14 #247.9 | |
movdqa %xmm6, %xmm9 #247.9 | |
psrld $2, %xmm7 #247.9 | |
pslld $30, %xmm15 #247.9 | |
psrld $13, %xmm14 #247.9 | |
pslld $19, %xmm9 #247.9 | |
por %xmm15, %xmm7 #247.9 | |
por %xmm9, %xmm14 #247.9 | |
movdqa %xmm6, %xmm10 #247.9 | |
pxor %xmm14, %xmm7 #247.9 | |
movdqa %xmm6, %xmm13 #247.9 | |
movdqa %xmm6, %xmm14 #247.9 | |
psrld $22, %xmm10 #247.9 | |
pslld $10, %xmm13 #247.9 | |
pand %xmm1, %xmm14 #247.9 | |
movdqa %xmm3, 1904(%rsp) #247.9 | |
por %xmm13, %xmm10 #247.9 | |
pxor %xmm14, %xmm3 #247.9 | |
pxor %xmm10, %xmm7 #247.9 | |
pxor %xmm5, %xmm3 #247.9 | |
paddd %xmm12, %xmm11 #247.9 | |
paddd %xmm3, %xmm7 #247.9 | |
movdqa %xmm0, %xmm5 #249.14 | |
paddd %xmm7, %xmm12 #247.9 | |
movdqa %xmm0, %xmm14 #249.14 | |
movdqa %xmm0, %xmm3 #249.14 | |
movdqa %xmm0, %xmm7 #249.14 | |
psrld $17, %xmm5 #249.14 | |
pslld $15, %xmm14 #249.14 | |
psrld $19, %xmm3 #249.14 | |
pslld $13, %xmm7 #249.14 | |
por %xmm14, %xmm5 #249.14 | |
por %xmm7, %xmm3 #249.14 | |
pxor %xmm3, %xmm5 #249.14 | |
movdqa %xmm11, %xmm3 #250.9 | |
movdqa %xmm11, %xmm7 #250.9 | |
movdqa %xmm11, %xmm9 #250.9 | |
movdqa %xmm11, %xmm10 #250.9 | |
psrld $6, %xmm3 #250.9 | |
movdqa %xmm0, 1824(%rsp) #244.15 | |
psrld $10, %xmm0 #249.14 | |
pslld $26, %xmm7 #250.9 | |
psrld $11, %xmm9 #250.9 | |
pslld $21, %xmm10 #250.9 | |
movdqa %xmm11, %xmm13 #250.9 | |
movdqa %xmm11, %xmm15 #250.9 | |
pxor %xmm0, %xmm5 #249.14 | |
por %xmm7, %xmm3 #250.9 | |
por %xmm10, %xmm9 #250.9 | |
psrld $25, %xmm13 #250.9 | |
pslld $7, %xmm15 #250.9 | |
movdqa %xmm11, %xmm0 #250.9 | |
pxor %xmm9, %xmm3 #250.9 | |
movdqa %xmm11, 1888(%rsp) #247.9 | |
por %xmm15, %xmm13 #250.9 | |
pand %xmm4, %xmm0 #250.9 | |
pandn %xmm8, %xmm11 #250.9 | |
movdqa %xmm4, 1840(%rsp) #245.9 | |
pxor %xmm13, %xmm3 #250.9 | |
movdqa .L_2il0floatpacket.6777(%rip), %xmm4 #250.9 | |
pxor %xmm11, %xmm0 #250.9 | |
paddd 1712(%rsp), %xmm5 #249.14 | |
paddd %xmm3, %xmm2 #250.9 | |
movdqa 816(%rsp), %xmm14 #249.14 | |
paddd %xmm0, %xmm4 #250.9 | |
paddd %xmm5, %xmm14 #249.14 | |
paddd %xmm4, %xmm2 #250.9 | |
paddd %xmm14, %xmm2 #250.9 | |
movdqa %xmm12, %xmm0 #250.9 | |
paddd %xmm2, %xmm1 #250.9 | |
psrld $2, %xmm0 #250.9 | |
movdqa %xmm2, 1952(%rsp) #250.9 | |
movdqa %xmm12, %xmm2 #250.9 | |
movdqa %xmm1, 1968(%rsp) #250.9 | |
movdqa %xmm12, %xmm1 #250.9 | |
movdqa %xmm8, 1792(%rsp) #243.9 | |
movdqa %xmm12, %xmm8 #250.9 | |
movdqa %xmm12, 1920(%rsp) #247.9 | |
pslld $30, %xmm1 #250.9 | |
psrld $13, %xmm2 #250.9 | |
pslld $19, %xmm12 #250.9 | |
por %xmm1, %xmm0 #250.9 | |
por %xmm12, %xmm2 #250.9 | |
pand %xmm6, %xmm8 #250.9 | |
pxor %xmm2, %xmm0 #250.9 | |
movdqa %xmm6, 1856(%rsp) #245.9 | |
movdqa %xmm14, 1936(%rsp) #249.14 | |
movdqa %xmm8, 1984(%rsp) #250.9 | |
movdqa %xmm0, 2000(%rsp) #250.9 | |
# LOE | |
..B2.19: # Preds ..B2.20 | |
movdqa 1920(%rsp), %xmm2 #250.9 | |
movdqa 1808(%rsp), %xmm6 #250.9 | |
movdqa %xmm2, %xmm12 #250.9 | |
movdqa 1984(%rsp), %xmm3 #250.9 | |
movdqa %xmm2, %xmm7 #250.9 | |
movdqa %xmm2, %xmm1 #250.9 | |
psrld $22, %xmm12 #250.9 | |
pslld $10, %xmm7 #250.9 | |
pand %xmm6, %xmm1 #250.9 | |
movdqa %xmm3, %xmm4 #250.9 | |
por %xmm7, %xmm12 #250.9 | |
movdqa 2000(%rsp), %xmm5 #250.9 | |
pxor %xmm1, %xmm4 #250.9 | |
movdqa 1904(%rsp), %xmm15 #250.9 | |
pxor %xmm12, %xmm5 #250.9 | |
pxor %xmm4, %xmm15 #250.9 | |
movdqa 1952(%rsp), %xmm12 #250.9 | |
paddd %xmm15, %xmm5 #250.9 | |
movdqa 1440(%rsp), %xmm9 #251.14 | |
paddd %xmm5, %xmm12 #250.9 | |
movdqa 1872(%rsp), %xmm10 #251.14 | |
movdqa %xmm9, %xmm7 #251.14 | |
movdqa %xmm9, %xmm1 #251.14 | |
movdqa %xmm9, %xmm4 #251.14 | |
movdqa %xmm9, %xmm5 #251.14 | |
movdqa %xmm10, %xmm13 #251.14 | |
movdqa %xmm10, %xmm0 #251.14 | |
movdqa %xmm10, %xmm11 #251.14 | |
movdqa %xmm10, %xmm14 #251.14 | |
psrld $7, %xmm7 #251.14 | |
pslld $25, %xmm1 #251.14 | |
psrld $18, %xmm4 #251.14 | |
pslld $14, %xmm5 #251.14 | |
psrld $17, %xmm13 #251.14 | |
pslld $15, %xmm0 #251.14 | |
psrld $19, %xmm11 #251.14 | |
pslld $13, %xmm14 #251.14 | |
por %xmm1, %xmm7 #251.14 | |
por %xmm5, %xmm4 #251.14 | |
movdqa %xmm9, %xmm15 #251.14 | |
por %xmm0, %xmm13 #251.14 | |
por %xmm14, %xmm11 #251.14 | |
pxor %xmm4, %xmm7 #251.14 | |
psrld $3, %xmm15 #251.14 | |
movdqa 592(%rsp), %xmm0 #251.14 | |
pxor %xmm11, %xmm13 #251.14 | |
movdqa 1968(%rsp), %xmm11 #252.9 | |
pxor %xmm15, %xmm7 #251.14 | |
paddd %xmm7, %xmm0 #251.14 | |
movdqa %xmm11, %xmm14 #252.9 | |
movdqa %xmm11, %xmm7 #252.9 | |
movdqa %xmm11, %xmm1 #252.9 | |
movdqa %xmm11, %xmm4 #252.9 | |
psrld $10, %xmm10 #251.14 | |
psrld $6, %xmm14 #252.9 | |
pslld $26, %xmm7 #252.9 | |
psrld $11, %xmm1 #252.9 | |
pslld $21, %xmm4 #252.9 | |
movdqa %xmm11, %xmm5 #252.9 | |
movdqa %xmm11, %xmm15 #252.9 | |
movdqa 1728(%rsp), %xmm8 #251.14 | |
pxor %xmm10, %xmm13 #251.14 | |
por %xmm7, %xmm14 #252.9 | |
por %xmm4, %xmm1 #252.9 | |
psrld $25, %xmm5 #252.9 | |
pslld $7, %xmm15 #252.9 | |
paddd %xmm13, %xmm8 #251.14 | |
pxor %xmm1, %xmm14 #252.9 | |
por %xmm15, %xmm5 #252.9 | |
paddd %xmm0, %xmm8 #251.14 | |
movdqa 1888(%rsp), %xmm1 #252.9 | |
pxor %xmm5, %xmm14 #252.9 | |
movdqa 1840(%rsp), %xmm5 #252.9 | |
movdqa %xmm11, %xmm13 #252.9 | |
movdqa %xmm11, %xmm0 #252.9 | |
pand %xmm1, %xmm13 #252.9 | |
pandn %xmm5, %xmm0 #252.9 | |
movdqa %xmm12, %xmm7 #252.9 | |
movdqa 1792(%rsp), %xmm4 #252.9 | |
pxor %xmm0, %xmm13 #252.9 | |
movdqa .L_2il0floatpacket.6778(%rip), %xmm10 #252.9 | |
movdqa %xmm12, %xmm0 #252.9 | |
paddd %xmm14, %xmm4 #252.9 | |
paddd %xmm13, %xmm10 #252.9 | |
psrld $2, %xmm0 #252.9 | |
pslld $30, %xmm7 #252.9 | |
movdqa %xmm12, %xmm15 #252.9 | |
movdqa %xmm12, %xmm13 #252.9 | |
paddd %xmm10, %xmm4 #252.9 | |
por %xmm7, %xmm0 #252.9 | |
psrld $13, %xmm15 #252.9 | |
pslld $19, %xmm13 #252.9 | |
movdqa %xmm12, %xmm7 #252.9 | |
movdqa %xmm12, %xmm10 #252.9 | |
por %xmm13, %xmm15 #252.9 | |
psrld $22, %xmm7 #252.9 | |
pslld $10, %xmm10 #252.9 | |
pxor %xmm15, %xmm0 #252.9 | |
por %xmm10, %xmm7 #252.9 | |
movdqa %xmm2, %xmm14 #252.9 | |
pxor %xmm7, %xmm0 #252.9 | |
pand %xmm12, %xmm14 #252.9 | |
movdqa 1856(%rsp), %xmm7 #252.9 | |
movdqa %xmm14, %xmm13 #252.9 | |
movdqa %xmm7, %xmm15 #252.9 | |
paddd %xmm8, %xmm4 #252.9 | |
pand %xmm12, %xmm15 #252.9 | |
paddd %xmm4, %xmm6 #252.9 | |
pxor %xmm15, %xmm13 #252.9 | |
pxor %xmm13, %xmm3 #252.9 | |
movdqa 1936(%rsp), %xmm15 #253.14 | |
paddd %xmm3, %xmm0 #252.9 | |
paddd %xmm0, %xmm4 #252.9 | |
movdqa %xmm15, %xmm3 #253.14 | |
movdqa %xmm15, %xmm10 #253.14 | |
movdqa %xmm15, %xmm13 #253.14 | |
movdqa %xmm15, %xmm0 #253.14 | |
psrld $17, %xmm3 #253.14 | |
pslld $15, %xmm10 #253.14 | |
psrld $19, %xmm13 #253.14 | |
pslld $13, %xmm0 #253.14 | |
por %xmm10, %xmm3 #253.14 | |
por %xmm0, %xmm13 #253.14 | |
psrld $10, %xmm15 #253.14 | |
pxor %xmm13, %xmm3 #253.14 | |
pxor %xmm15, %xmm3 #253.14 | |
movdqa 1456(%rsp), %xmm15 #253.14 | |
movdqa %xmm15, %xmm13 #253.14 | |
movdqa %xmm15, %xmm0 #253.14 | |
movdqa 1744(%rsp), %xmm10 #253.14 | |
psrld $7, %xmm13 #253.14 | |
pslld $25, %xmm0 #253.14 | |
paddd %xmm3, %xmm10 #253.14 | |
por %xmm0, %xmm13 #253.14 | |
movdqa %xmm15, %xmm0 #253.14 | |
movdqa %xmm15, %xmm3 #253.14 | |
psrld $18, %xmm0 #253.14 | |
pslld $14, %xmm3 #253.14 | |
por %xmm3, %xmm0 #253.14 | |
movdqa %xmm15, %xmm3 #253.14 | |
pxor %xmm0, %xmm13 #253.14 | |
psrld $3, %xmm3 #253.14 | |
pxor %xmm3, %xmm13 #253.14 | |
movdqa %xmm6, %xmm0 #254.9 | |
paddd %xmm13, %xmm9 #253.14 | |
movdqa %xmm6, %xmm3 #254.9 | |
paddd %xmm9, %xmm10 #253.14 | |
movdqa %xmm6, %xmm9 #254.9 | |
movdqa %xmm6, %xmm13 #254.9 | |
psrld $6, %xmm9 #254.9 | |
pslld $26, %xmm0 #254.9 | |
psrld $11, %xmm3 #254.9 | |
pslld $21, %xmm13 #254.9 | |
por %xmm0, %xmm9 #254.9 | |
por %xmm13, %xmm3 #254.9 | |
movdqa %xmm6, %xmm0 #254.9 | |
pxor %xmm3, %xmm9 #254.9 | |
movdqa %xmm6, %xmm3 #254.9 | |
psrld $25, %xmm0 #254.9 | |
pslld $7, %xmm3 #254.9 | |
por %xmm3, %xmm0 #254.9 | |
movdqa %xmm11, %xmm3 #254.9 | |
movdqa %xmm6, %xmm13 #254.9 | |
pand %xmm6, %xmm3 #254.9 | |
pandn %xmm1, %xmm13 #254.9 | |
pxor %xmm0, %xmm9 #254.9 | |
movdqa .L_2il0floatpacket.6779(%rip), %xmm0 #254.9 | |
pxor %xmm13, %xmm3 #254.9 | |
paddd %xmm3, %xmm0 #254.9 | |
movdqa %xmm4, %xmm13 #254.9 | |
movdqa %xmm4, %xmm3 #254.9 | |
paddd %xmm9, %xmm5 #254.9 | |
psrld $2, %xmm13 #254.9 | |
pslld $30, %xmm3 #254.9 | |
paddd %xmm0, %xmm5 #254.9 | |
por %xmm3, %xmm13 #254.9 | |
movdqa %xmm4, %xmm3 #254.9 | |
movdqa %xmm4, %xmm0 #254.9 | |
psrld $13, %xmm3 #254.9 | |
pslld $19, %xmm0 #254.9 | |
por %xmm0, %xmm3 #254.9 | |
movdqa %xmm4, %xmm0 #254.9 | |
pxor %xmm3, %xmm13 #254.9 | |
movdqa %xmm4, %xmm3 #254.9 | |
movdqa %xmm4, %xmm9 #254.9 | |
psrld $22, %xmm0 #254.9 | |
pslld $10, %xmm3 #254.9 | |
pand %xmm12, %xmm9 #254.9 | |
por %xmm3, %xmm0 #254.9 | |
movdqa %xmm2, %xmm3 #254.9 | |
pxor %xmm0, %xmm13 #254.9 | |
pand %xmm4, %xmm3 #254.9 | |
movdqa %xmm9, %xmm0 #254.9 | |
paddd %xmm10, %xmm5 #254.9 | |
pxor %xmm3, %xmm0 #254.9 | |
paddd %xmm5, %xmm7 #254.9 | |
pxor %xmm14, %xmm0 #254.9 | |
movdqa %xmm8, %xmm14 #255.14 | |
paddd %xmm0, %xmm13 #254.9 | |
movdqa %xmm8, %xmm3 #255.14 | |
paddd %xmm13, %xmm5 #254.9 | |
movdqa %xmm8, %xmm13 #255.14 | |
movdqa %xmm8, %xmm0 #255.14 | |
psrld $17, %xmm13 #255.14 | |
pslld $15, %xmm14 #255.14 | |
psrld $19, %xmm3 #255.14 | |
pslld $13, %xmm0 #255.14 | |
por %xmm14, %xmm13 #255.14 | |
por %xmm0, %xmm3 #255.14 | |
movdqa %xmm8, 2016(%rsp) #251.14 | |
pxor %xmm3, %xmm13 #255.14 | |
psrld $10, %xmm8 #255.14 | |
pxor %xmm8, %xmm13 #255.14 | |
movdqa 1472(%rsp), %xmm8 #255.14 | |
movdqa %xmm8, %xmm14 #255.14 | |
movdqa %xmm8, %xmm0 #255.14 | |
movdqa 1760(%rsp), %xmm3 #255.14 | |
psrld $7, %xmm14 #255.14 | |
pslld $25, %xmm0 #255.14 | |
paddd %xmm13, %xmm3 #255.14 | |
por %xmm0, %xmm14 #255.14 | |
movdqa %xmm8, %xmm0 #255.14 | |
movdqa %xmm8, %xmm13 #255.14 | |
psrld $18, %xmm0 #255.14 | |
pslld $14, %xmm13 #255.14 | |
por %xmm13, %xmm0 #255.14 | |
movdqa %xmm8, %xmm13 #255.14 | |
pxor %xmm0, %xmm14 #255.14 | |
psrld $3, %xmm13 #255.14 | |
pxor %xmm13, %xmm14 #255.14 | |
movdqa %xmm7, %xmm0 #256.9 | |
paddd %xmm14, %xmm15 #255.14 | |
movdqa %xmm7, %xmm13 #256.9 | |
paddd %xmm15, %xmm3 #255.14 | |
movdqa %xmm7, %xmm14 #256.9 | |
movdqa %xmm7, %xmm15 #256.9 | |
psrld $6, %xmm0 #256.9 | |
pslld $26, %xmm13 #256.9 | |
psrld $11, %xmm14 #256.9 | |
pslld $21, %xmm15 #256.9 | |
por %xmm13, %xmm0 #256.9 | |
por %xmm15, %xmm14 #256.9 | |
movdqa %xmm7, %xmm13 #256.9 | |
movdqa %xmm7, %xmm15 #256.9 | |
psrld $25, %xmm13 #256.9 | |
pslld $7, %xmm15 #256.9 | |
pxor %xmm14, %xmm0 #256.9 | |
por %xmm15, %xmm13 #256.9 | |
movdqa %xmm7, %xmm15 #256.9 | |
pxor %xmm13, %xmm0 #256.9 | |
movdqa %xmm7, %xmm13 #256.9 | |
pand %xmm6, %xmm15 #256.9 | |
pandn %xmm11, %xmm13 #256.9 | |
movdqa .L_2il0floatpacket.6780(%rip), %xmm14 #256.9 | |
pxor %xmm13, %xmm15 #256.9 | |
paddd %xmm0, %xmm1 #256.9 | |
paddd %xmm15, %xmm14 #256.9 | |
paddd %xmm14, %xmm1 #256.9 | |
movdqa %xmm5, %xmm14 #256.9 | |
movdqa %xmm5, %xmm13 #256.9 | |
psrld $2, %xmm14 #256.9 | |
pslld $30, %xmm13 #256.9 | |
movdqa %xmm5, %xmm15 #256.9 | |
por %xmm13, %xmm14 #256.9 | |
movdqa %xmm5, %xmm13 #256.9 | |
psrld $13, %xmm15 #256.9 | |
pslld $19, %xmm13 #256.9 | |
por %xmm13, %xmm15 #256.9 | |
movdqa %xmm5, %xmm13 #256.9 | |
pxor %xmm15, %xmm14 #256.9 | |
movdqa %xmm5, %xmm15 #256.9 | |
movdqa %xmm5, %xmm0 #256.9 | |
psrld $22, %xmm13 #256.9 | |
pslld $10, %xmm15 #256.9 | |
pand %xmm4, %xmm0 #256.9 | |
por %xmm15, %xmm13 #256.9 | |
movdqa %xmm5, %xmm15 #256.9 | |
pxor %xmm13, %xmm14 #256.9 | |
pand %xmm12, %xmm15 #256.9 | |
movdqa %xmm0, %xmm13 #256.9 | |
paddd %xmm3, %xmm1 #256.9 | |
pxor %xmm15, %xmm13 #256.9 | |
paddd %xmm1, %xmm2 #256.9 | |
pxor %xmm9, %xmm13 #256.9 | |
movdqa %xmm10, %xmm9 #257.14 | |
paddd %xmm13, %xmm14 #256.9 | |
movdqa %xmm10, %xmm13 #257.14 | |
paddd %xmm14, %xmm1 #256.9 | |
movdqa %xmm10, %xmm15 #257.14 | |
movdqa %xmm10, %xmm14 #257.14 | |
psrld $17, %xmm13 #257.14 | |
pslld $15, %xmm9 #257.14 | |
psrld $19, %xmm15 #257.14 | |
pslld $13, %xmm14 #257.14 | |
por %xmm9, %xmm13 #257.14 | |
por %xmm14, %xmm15 #257.14 | |
movdqa %xmm10, 2032(%rsp) #253.14 | |
pxor %xmm15, %xmm13 #257.14 | |
psrld $10, %xmm10 #257.14 | |
movdqa 1776(%rsp), %xmm15 #257.14 | |
pxor %xmm10, %xmm13 #257.14 | |
paddd %xmm13, %xmm15 #257.14 | |
movdqa 1520(%rsp), %xmm13 #257.14 | |
movdqa %xmm13, %xmm14 #257.14 | |
movdqa %xmm13, %xmm9 #257.14 | |
psrld $7, %xmm14 #257.14 | |
pslld $25, %xmm9 #257.14 | |
por %xmm9, %xmm14 #257.14 | |
movdqa %xmm13, %xmm9 #257.14 | |
movdqa %xmm13, %xmm10 #257.14 | |
psrld $18, %xmm9 #257.14 | |
pslld $14, %xmm10 #257.14 | |
por %xmm10, %xmm9 #257.14 | |
movdqa %xmm13, %xmm10 #257.14 | |
pxor %xmm9, %xmm14 #257.14 | |
psrld $3, %xmm10 #257.14 | |
pxor %xmm10, %xmm14 #257.14 | |
movdqa %xmm2, %xmm10 #258.9 | |
paddd %xmm14, %xmm8 #257.14 | |
movdqa %xmm2, %xmm14 #258.9 | |
paddd %xmm8, %xmm15 #257.14 | |
movdqa %xmm2, %xmm9 #258.9 | |
movdqa %xmm2, %xmm8 #258.9 | |
psrld $6, %xmm14 #258.9 | |
pslld $26, %xmm10 #258.9 | |
psrld $11, %xmm9 #258.9 | |
pslld $21, %xmm8 #258.9 | |
por %xmm10, %xmm14 #258.9 | |
por %xmm8, %xmm9 #258.9 | |
movdqa %xmm2, %xmm10 #258.9 | |
movdqa %xmm2, %xmm8 #258.9 | |
psrld $25, %xmm10 #258.9 | |
pslld $7, %xmm8 #258.9 | |
pxor %xmm9, %xmm14 #258.9 | |
por %xmm8, %xmm10 #258.9 | |
movdqa %xmm2, %xmm8 #258.9 | |
pxor %xmm10, %xmm14 #258.9 | |
movdqa %xmm2, %xmm10 #258.9 | |
pand %xmm7, %xmm8 #258.9 | |
pandn %xmm6, %xmm10 #258.9 | |
movdqa .L_2il0floatpacket.6781(%rip), %xmm9 #258.9 | |
pxor %xmm10, %xmm8 #258.9 | |
paddd %xmm8, %xmm9 #258.9 | |
movdqa %xmm1, %xmm8 #258.9 | |
movdqa %xmm1, %xmm10 #258.9 | |
paddd %xmm14, %xmm11 #258.9 | |
psrld $2, %xmm8 #258.9 | |
pslld $30, %xmm10 #258.9 | |
paddd %xmm9, %xmm11 #258.9 | |
por %xmm10, %xmm8 #258.9 | |
movdqa %xmm1, %xmm10 #258.9 | |
movdqa %xmm1, %xmm9 #258.9 | |
psrld $13, %xmm10 #258.9 | |
pslld $19, %xmm9 #258.9 | |
por %xmm9, %xmm10 #258.9 | |
movdqa %xmm1, %xmm9 #258.9 | |
pxor %xmm10, %xmm8 #258.9 | |
movdqa %xmm1, %xmm10 #258.9 | |
movdqa %xmm1, %xmm14 #258.9 | |
psrld $22, %xmm9 #258.9 | |
pslld $10, %xmm10 #258.9 | |
pand %xmm5, %xmm14 #258.9 | |
por %xmm10, %xmm9 #258.9 | |
movdqa %xmm1, %xmm10 #258.9 | |
pxor %xmm9, %xmm8 #258.9 | |
pand %xmm4, %xmm10 #258.9 | |
movdqa %xmm14, %xmm9 #258.9 | |
paddd %xmm15, %xmm11 #258.9 | |
pxor %xmm10, %xmm9 #258.9 | |
paddd %xmm11, %xmm12 #258.9 | |
pxor %xmm0, %xmm9 #258.9 | |
movdqa %xmm3, %xmm0 #259.14 | |
paddd %xmm9, %xmm8 #258.9 | |
movdqa %xmm3, %xmm10 #259.14 | |
paddd %xmm8, %xmm11 #258.9 | |
movdqa %xmm3, %xmm9 #259.14 | |
movdqa %xmm3, %xmm8 #259.14 | |
psrld $17, %xmm0 #259.14 | |
pslld $15, %xmm10 #259.14 | |
psrld $19, %xmm9 #259.14 | |
pslld $13, %xmm8 #259.14 | |
por %xmm10, %xmm0 #259.14 | |
por %xmm8, %xmm9 #259.14 | |
movdqa %xmm3, 2048(%rsp) #255.14 | |
pxor %xmm9, %xmm0 #259.14 | |
psrld $10, %xmm3 #259.14 | |
pxor %xmm3, %xmm0 #259.14 | |
movdqa 1824(%rsp), %xmm3 #259.14 | |
paddd %xmm0, %xmm3 #259.14 | |
movdqa 1568(%rsp), %xmm0 #259.14 | |
movdqa %xmm0, %xmm10 #259.14 | |
movdqa %xmm0, %xmm8 #259.14 | |
psrld $7, %xmm10 #259.14 | |
pslld $25, %xmm8 #259.14 | |
por %xmm8, %xmm10 #259.14 | |
movdqa %xmm0, %xmm8 #259.14 | |
movdqa %xmm0, %xmm9 #259.14 | |
psrld $18, %xmm8 #259.14 | |
pslld $14, %xmm9 #259.14 | |
por %xmm9, %xmm8 #259.14 | |
movdqa %xmm0, %xmm9 #259.14 | |
pxor %xmm8, %xmm10 #259.14 | |
psrld $3, %xmm9 #259.14 | |
pxor %xmm9, %xmm10 #259.14 | |
movdqa %xmm12, %xmm8 #260.9 | |
paddd %xmm10, %xmm13 #259.14 | |
movdqa %xmm12, %xmm10 #260.9 | |
paddd %xmm13, %xmm3 #259.14 | |
movdqa %xmm12, %xmm13 #260.9 | |
movdqa %xmm12, %xmm9 #260.9 | |
psrld $6, %xmm13 #260.9 | |
pslld $26, %xmm8 #260.9 | |
psrld $11, %xmm10 #260.9 | |
pslld $21, %xmm9 #260.9 | |
por %xmm8, %xmm13 #260.9 | |
por %xmm9, %xmm10 #260.9 | |
movdqa %xmm12, %xmm8 #260.9 | |
pxor %xmm10, %xmm13 #260.9 | |
movdqa %xmm12, %xmm10 #260.9 | |
psrld $25, %xmm8 #260.9 | |
pslld $7, %xmm10 #260.9 | |
por %xmm10, %xmm8 #260.9 | |
movdqa %xmm12, %xmm10 #260.9 | |
pxor %xmm8, %xmm13 #260.9 | |
movdqa %xmm12, %xmm8 #260.9 | |
pand %xmm2, %xmm8 #260.9 | |
pandn %xmm7, %xmm10 #260.9 | |
movdqa .L_2il0floatpacket.6782(%rip), %xmm9 #260.9 | |
pxor %xmm10, %xmm8 #260.9 | |
paddd %xmm8, %xmm9 #260.9 | |
movdqa %xmm11, %xmm10 #260.9 | |
movdqa %xmm11, %xmm8 #260.9 | |
paddd %xmm13, %xmm6 #260.9 | |
psrld $2, %xmm10 #260.9 | |
pslld $30, %xmm8 #260.9 | |
paddd %xmm9, %xmm6 #260.9 | |
por %xmm8, %xmm10 #260.9 | |
movdqa %xmm11, %xmm8 #260.9 | |
movdqa %xmm11, %xmm9 #260.9 | |
psrld $13, %xmm8 #260.9 | |
pslld $19, %xmm9 #260.9 | |
por %xmm9, %xmm8 #260.9 | |
movdqa %xmm11, %xmm9 #260.9 | |
pxor %xmm8, %xmm10 #260.9 | |
movdqa %xmm11, %xmm8 #260.9 | |
movdqa %xmm11, %xmm13 #260.9 | |
psrld $22, %xmm8 #260.9 | |
pslld $10, %xmm9 #260.9 | |
pand %xmm1, %xmm13 #260.9 | |
por %xmm9, %xmm8 #260.9 | |
movdqa %xmm11, %xmm9 #260.9 | |
pxor %xmm8, %xmm10 #260.9 | |
pand %xmm5, %xmm9 #260.9 | |
movdqa %xmm13, %xmm8 #260.9 | |
paddd %xmm3, %xmm6 #260.9 | |
pxor %xmm9, %xmm8 #260.9 | |
paddd %xmm6, %xmm4 #260.9 | |
pxor %xmm14, %xmm8 #260.9 | |
movdqa %xmm15, %xmm14 #261.14 | |
paddd %xmm8, %xmm10 #260.9 | |
movdqa %xmm15, %xmm8 #261.14 | |
paddd %xmm10, %xmm6 #260.9 | |
movdqa %xmm15, %xmm10 #261.14 | |
movdqa %xmm15, %xmm9 #261.14 | |
psrld $17, %xmm10 #261.14 | |
pslld $15, %xmm14 #261.14 | |
psrld $19, %xmm8 #261.14 | |
pslld $13, %xmm9 #261.14 | |
por %xmm14, %xmm10 #261.14 | |
por %xmm9, %xmm8 #261.14 | |
movdqa %xmm15, 2064(%rsp) #257.14 | |
pxor %xmm8, %xmm10 #261.14 | |
psrld $10, %xmm15 #261.14 | |
pxor %xmm15, %xmm10 #261.14 | |
movdqa 1616(%rsp), %xmm15 #261.14 | |
movdqa %xmm15, %xmm14 #261.14 | |
movdqa %xmm15, %xmm9 #261.14 | |
movdqa 1872(%rsp), %xmm8 #261.14 | |
psrld $7, %xmm14 #261.14 | |
pslld $25, %xmm9 #261.14 | |
paddd %xmm10, %xmm8 #261.14 | |
por %xmm9, %xmm14 #261.14 | |
movdqa %xmm15, %xmm10 #261.14 | |
movdqa %xmm15, %xmm9 #261.14 | |
psrld $18, %xmm10 #261.14 | |
pslld $14, %xmm9 #261.14 | |
por %xmm9, %xmm10 #261.14 | |
movdqa %xmm15, %xmm9 #261.14 | |
pxor %xmm10, %xmm14 #261.14 | |
psrld $3, %xmm9 #261.14 | |
pxor %xmm9, %xmm14 #261.14 | |
movdqa %xmm4, %xmm9 #262.9 | |
paddd %xmm14, %xmm0 #261.14 | |
movdqa %xmm4, %xmm14 #262.9 | |
paddd %xmm0, %xmm8 #261.14 | |
movdqa %xmm4, %xmm10 #262.9 | |
movdqa %xmm4, %xmm0 #262.9 | |
psrld $6, %xmm14 #262.9 | |
pslld $26, %xmm9 #262.9 | |
psrld $11, %xmm10 #262.9 | |
pslld $21, %xmm0 #262.9 | |
por %xmm9, %xmm14 #262.9 | |
por %xmm0, %xmm10 #262.9 | |
movdqa %xmm4, %xmm9 #262.9 | |
movdqa %xmm4, %xmm0 #262.9 | |
psrld $25, %xmm9 #262.9 | |
pslld $7, %xmm0 #262.9 | |
pxor %xmm10, %xmm14 #262.9 | |
por %xmm0, %xmm9 #262.9 | |
movdqa %xmm4, %xmm0 #262.9 | |
pxor %xmm9, %xmm14 #262.9 | |
movdqa %xmm4, %xmm9 #262.9 | |
pand %xmm12, %xmm0 #262.9 | |
pandn %xmm2, %xmm9 #262.9 | |
movdqa .L_2il0floatpacket.6783(%rip), %xmm10 #262.9 | |
pxor %xmm9, %xmm0 #262.9 | |
paddd %xmm14, %xmm7 #262.9 | |
paddd %xmm0, %xmm10 #262.9 | |
paddd %xmm10, %xmm7 #262.9 | |
movdqa %xmm6, %xmm0 #262.9 | |
movdqa %xmm11, 2096(%rsp) #258.9 | |
paddd %xmm8, %xmm7 #262.9 | |
movdqa %xmm8, 2160(%rsp) #261.14 | |
pand %xmm11, %xmm0 #262.9 | |
movdqa %xmm6, %xmm11 #262.9 | |
movdqa %xmm6, %xmm8 #262.9 | |
movdqa %xmm6, %xmm9 #262.9 | |
movdqa %xmm6, %xmm10 #262.9 | |
psrld $2, %xmm11 #262.9 | |
pslld $30, %xmm8 #262.9 | |
psrld $13, %xmm9 #262.9 | |
pslld $19, %xmm10 #262.9 | |
por %xmm8, %xmm11 #262.9 | |
por %xmm10, %xmm9 #262.9 | |
pxor %xmm9, %xmm11 #262.9 | |
movdqa %xmm6, %xmm14 #262.9 | |
movdqa %xmm6, %xmm9 #262.9 | |
movdqa %xmm6, %xmm8 #262.9 | |
psrld $22, %xmm14 #262.9 | |
pslld $10, %xmm9 #262.9 | |
pand %xmm1, %xmm8 #262.9 | |
por %xmm9, %xmm14 #262.9 | |
movdqa %xmm0, 2192(%rsp) #262.9 | |
pxor %xmm8, %xmm0 #262.9 | |
pxor %xmm14, %xmm11 #262.9 | |
pxor %xmm13, %xmm0 #262.9 | |
paddd %xmm0, %xmm11 #262.9 | |
movdqa %xmm3, %xmm13 #263.14 | |
movdqa %xmm3, %xmm0 #263.14 | |
movdqa %xmm3, %xmm8 #263.14 | |
movdqa %xmm3, %xmm9 #263.14 | |
psrld $17, %xmm13 #263.14 | |
pslld $15, %xmm0 #263.14 | |
psrld $19, %xmm8 #263.14 | |
pslld $13, %xmm9 #263.14 | |
por %xmm0, %xmm13 #263.14 | |
por %xmm9, %xmm8 #263.14 | |
paddd %xmm7, %xmm5 #262.9 | |
movdqa %xmm3, 2112(%rsp) #259.14 | |
pxor %xmm8, %xmm13 #263.14 | |
psrld $10, %xmm3 #263.14 | |
paddd %xmm11, %xmm7 #262.9 | |
pxor %xmm3, %xmm13 #263.14 | |
movdqa %xmm5, %xmm11 #264.9 | |
movdqa 1936(%rsp), %xmm3 #263.14 | |
movdqa %xmm5, %xmm9 #264.9 | |
paddd %xmm13, %xmm3 #263.14 | |
psrld $6, %xmm11 #264.9 | |
movdqa 1680(%rsp), %xmm13 #263.14 | |
pslld $21, %xmm9 #264.9 | |
movdqa %xmm13, %xmm10 #263.14 | |
movdqa %xmm13, %xmm14 #263.14 | |
movdqa %xmm13, %xmm0 #263.14 | |
movdqa %xmm13, %xmm8 #263.14 | |
psrld $7, %xmm10 #263.14 | |
pslld $25, %xmm14 #263.14 | |
psrld $18, %xmm0 #263.14 | |
pslld $14, %xmm8 #263.14 | |
por %xmm14, %xmm10 #263.14 | |
por %xmm8, %xmm0 #263.14 | |
pxor %xmm0, %xmm10 #263.14 | |
psrld $3, %xmm13 #263.14 | |
pxor %xmm13, %xmm10 #263.14 | |
movdqa %xmm5, %xmm0 #264.9 | |
movdqa %xmm5, %xmm8 #264.9 | |
paddd %xmm10, %xmm15 #263.14 | |
pslld $26, %xmm0 #264.9 | |
psrld $11, %xmm8 #264.9 | |
movdqa %xmm5, %xmm10 #264.9 | |
movdqa %xmm5, %xmm13 #264.9 | |
por %xmm0, %xmm11 #264.9 | |
por %xmm9, %xmm8 #264.9 | |
psrld $25, %xmm10 #264.9 | |
pslld $7, %xmm13 #264.9 | |
movdqa %xmm5, %xmm0 #264.9 | |
pxor %xmm8, %xmm11 #264.9 | |
movdqa %xmm5, 2176(%rsp) #262.9 | |
por %xmm13, %xmm10 #264.9 | |
pand %xmm4, %xmm0 #264.9 | |
pandn %xmm12, %xmm5 #264.9 | |
movdqa %xmm4, 2128(%rsp) #260.9 | |
pxor %xmm10, %xmm11 #264.9 | |
movdqa .L_2il0floatpacket.6784(%rip), %xmm4 #264.9 | |
pxor %xmm5, %xmm0 #264.9 | |
paddd %xmm11, %xmm2 #264.9 | |
paddd %xmm0, %xmm4 #264.9 | |
paddd %xmm15, %xmm3 #263.14 | |
paddd %xmm4, %xmm2 #264.9 | |
movdqa %xmm7, %xmm0 #264.9 | |
paddd %xmm3, %xmm2 #264.9 | |
movdqa %xmm6, 2144(%rsp) #260.9 | |
pand %xmm6, %xmm0 #264.9 | |
movdqa %xmm3, 2224(%rsp) #263.14 | |
movdqa %xmm7, %xmm3 #264.9 | |
movdqa %xmm7, %xmm5 #264.9 | |
movdqa %xmm7, %xmm6 #264.9 | |
movdqa %xmm7, %xmm8 #264.9 | |
psrld $2, %xmm3 #264.9 | |
pslld $30, %xmm5 #264.9 | |
psrld $13, %xmm6 #264.9 | |
pslld $19, %xmm8 #264.9 | |
por %xmm5, %xmm3 #264.9 | |
por %xmm8, %xmm6 #264.9 | |
paddd %xmm2, %xmm1 #264.9 | |
movdqa %xmm7, 2208(%rsp) #262.9 | |
pxor %xmm6, %xmm3 #264.9 | |
psrld $22, %xmm7 #264.9 | |
movdqa %xmm12, 2080(%rsp) #258.9 | |
movdqa %xmm2, 2240(%rsp) #264.9 | |
movdqa %xmm1, 2256(%rsp) #264.9 | |
movdqa %xmm0, 2272(%rsp) #264.9 | |
movdqa %xmm3, 2288(%rsp) #264.9 | |
movdqa %xmm7, 2304(%rsp) #264.9 | |
# LOE | |
..B2.18: # Preds ..B2.19 | |
movdqa 2208(%rsp), %xmm2 #264.9 | |
movdqa 2096(%rsp), %xmm6 #264.9 | |
movdqa %xmm2, %xmm12 #264.9 | |
movdqa 2272(%rsp), %xmm3 #264.9 | |
movdqa %xmm2, %xmm1 #264.9 | |
movdqa 2304(%rsp), %xmm7 #264.9 | |
pslld $10, %xmm12 #264.9 | |
pand %xmm6, %xmm1 #264.9 | |
movdqa %xmm3, %xmm4 #264.9 | |
movdqa 2288(%rsp), %xmm5 #264.9 | |
por %xmm12, %xmm7 #264.9 | |
movdqa 2192(%rsp), %xmm15 #264.9 | |
pxor %xmm1, %xmm4 #264.9 | |
pxor %xmm7, %xmm5 #264.9 | |
pxor %xmm4, %xmm15 #264.9 | |
movdqa 2240(%rsp), %xmm12 #264.9 | |
paddd %xmm15, %xmm5 #264.9 | |
movdqa 1712(%rsp), %xmm9 #265.14 | |
paddd %xmm5, %xmm12 #264.9 | |
movdqa 2160(%rsp), %xmm10 #265.14 | |
movdqa %xmm9, %xmm7 #265.14 | |
movdqa %xmm9, %xmm1 #265.14 | |
movdqa %xmm9, %xmm4 #265.14 | |
movdqa %xmm9, %xmm5 #265.14 | |
movdqa %xmm10, %xmm13 #265.14 | |
movdqa %xmm10, %xmm0 #265.14 | |
movdqa %xmm10, %xmm11 #265.14 | |
movdqa %xmm10, %xmm14 #265.14 | |
psrld $7, %xmm7 #265.14 | |
pslld $25, %xmm1 #265.14 | |
psrld $18, %xmm4 #265.14 | |
pslld $14, %xmm5 #265.14 | |
psrld $17, %xmm13 #265.14 | |
pslld $15, %xmm0 #265.14 | |
psrld $19, %xmm11 #265.14 | |
pslld $13, %xmm14 #265.14 | |
por %xmm1, %xmm7 #265.14 | |
por %xmm5, %xmm4 #265.14 | |
movdqa %xmm9, %xmm15 #265.14 | |
por %xmm0, %xmm13 #265.14 | |
por %xmm14, %xmm11 #265.14 | |
pxor %xmm4, %xmm7 #265.14 | |
psrld $3, %xmm15 #265.14 | |
movdqa 1680(%rsp), %xmm0 #265.14 | |
pxor %xmm11, %xmm13 #265.14 | |
movdqa 2256(%rsp), %xmm11 #266.9 | |
pxor %xmm15, %xmm7 #265.14 | |
paddd %xmm7, %xmm0 #265.14 | |
movdqa %xmm11, %xmm14 #266.9 | |
movdqa %xmm11, %xmm7 #266.9 | |
movdqa %xmm11, %xmm1 #266.9 | |
movdqa %xmm11, %xmm4 #266.9 | |
psrld $10, %xmm10 #265.14 | |
psrld $6, %xmm14 #266.9 | |
pslld $26, %xmm7 #266.9 | |
psrld $11, %xmm1 #266.9 | |
pslld $21, %xmm4 #266.9 | |
movdqa %xmm11, %xmm5 #266.9 | |
movdqa %xmm11, %xmm15 #266.9 | |
movdqa 2016(%rsp), %xmm8 #265.14 | |
pxor %xmm10, %xmm13 #265.14 | |
por %xmm7, %xmm14 #266.9 | |
por %xmm4, %xmm1 #266.9 | |
psrld $25, %xmm5 #266.9 | |
pslld $7, %xmm15 #266.9 | |
paddd %xmm13, %xmm8 #265.14 | |
pxor %xmm1, %xmm14 #266.9 | |
por %xmm15, %xmm5 #266.9 | |
paddd %xmm0, %xmm8 #265.14 | |
movdqa 2176(%rsp), %xmm1 #266.9 | |
pxor %xmm5, %xmm14 #266.9 | |
movdqa 2128(%rsp), %xmm5 #266.9 | |
movdqa %xmm11, %xmm13 #266.9 | |
movdqa %xmm11, %xmm0 #266.9 | |
pand %xmm1, %xmm13 #266.9 | |
pandn %xmm5, %xmm0 #266.9 | |
movdqa %xmm12, %xmm7 #266.9 | |
movdqa 2080(%rsp), %xmm4 #266.9 | |
pxor %xmm0, %xmm13 #266.9 | |
movdqa .L_2il0floatpacket.6785(%rip), %xmm10 #266.9 | |
movdqa %xmm12, %xmm0 #266.9 | |
paddd %xmm14, %xmm4 #266.9 | |
paddd %xmm13, %xmm10 #266.9 | |
psrld $2, %xmm0 #266.9 | |
pslld $30, %xmm7 #266.9 | |
movdqa %xmm12, %xmm15 #266.9 | |
movdqa %xmm12, %xmm13 #266.9 | |
paddd %xmm10, %xmm4 #266.9 | |
por %xmm7, %xmm0 #266.9 | |
psrld $13, %xmm15 #266.9 | |
pslld $19, %xmm13 #266.9 | |
movdqa %xmm12, %xmm7 #266.9 | |
movdqa %xmm12, %xmm10 #266.9 | |
por %xmm13, %xmm15 #266.9 | |
psrld $22, %xmm7 #266.9 | |
pslld $10, %xmm10 #266.9 | |
pxor %xmm15, %xmm0 #266.9 | |
por %xmm10, %xmm7 #266.9 | |
movdqa %xmm2, %xmm14 #266.9 | |
pxor %xmm7, %xmm0 #266.9 | |
pand %xmm12, %xmm14 #266.9 | |
movdqa 2144(%rsp), %xmm7 #266.9 | |
movdqa %xmm14, %xmm13 #266.9 | |
movdqa %xmm7, %xmm15 #266.9 | |
paddd %xmm8, %xmm4 #266.9 | |
pand %xmm12, %xmm15 #266.9 | |
paddd %xmm4, %xmm6 #266.9 | |
pxor %xmm15, %xmm13 #266.9 | |
pxor %xmm13, %xmm3 #266.9 | |
movdqa 2224(%rsp), %xmm15 #267.14 | |
paddd %xmm3, %xmm0 #266.9 | |
paddd %xmm0, %xmm4 #266.9 | |
movdqa %xmm15, %xmm3 #267.14 | |
movdqa %xmm15, %xmm10 #267.14 | |
movdqa %xmm15, %xmm13 #267.14 | |
movdqa %xmm15, %xmm0 #267.14 | |
psrld $17, %xmm3 #267.14 | |
pslld $15, %xmm10 #267.14 | |
psrld $19, %xmm13 #267.14 | |
pslld $13, %xmm0 #267.14 | |
por %xmm10, %xmm3 #267.14 | |
por %xmm0, %xmm13 #267.14 | |
psrld $10, %xmm15 #267.14 | |
pxor %xmm13, %xmm3 #267.14 | |
pxor %xmm15, %xmm3 #267.14 | |
movdqa 1728(%rsp), %xmm15 #267.14 | |
movdqa %xmm15, %xmm13 #267.14 | |
movdqa %xmm15, %xmm0 #267.14 | |
movdqa 2032(%rsp), %xmm10 #267.14 | |
psrld $7, %xmm13 #267.14 | |
pslld $25, %xmm0 #267.14 | |
paddd %xmm3, %xmm10 #267.14 | |
por %xmm0, %xmm13 #267.14 | |
movdqa %xmm15, %xmm0 #267.14 | |
movdqa %xmm15, %xmm3 #267.14 | |
psrld $18, %xmm0 #267.14 | |
pslld $14, %xmm3 #267.14 | |
por %xmm3, %xmm0 #267.14 | |
movdqa %xmm15, %xmm3 #267.14 | |
pxor %xmm0, %xmm13 #267.14 | |
psrld $3, %xmm3 #267.14 | |
pxor %xmm3, %xmm13 #267.14 | |
movdqa %xmm6, %xmm0 #268.9 | |
paddd %xmm13, %xmm9 #267.14 | |
movdqa %xmm6, %xmm3 #268.9 | |
paddd %xmm9, %xmm10 #267.14 | |
movdqa %xmm6, %xmm9 #268.9 | |
movdqa %xmm6, %xmm13 #268.9 | |
psrld $6, %xmm9 #268.9 | |
pslld $26, %xmm0 #268.9 | |
psrld $11, %xmm3 #268.9 | |
pslld $21, %xmm13 #268.9 | |
por %xmm0, %xmm9 #268.9 | |
por %xmm13, %xmm3 #268.9 | |
movdqa %xmm6, %xmm0 #268.9 | |
pxor %xmm3, %xmm9 #268.9 | |
movdqa %xmm6, %xmm3 #268.9 | |
psrld $25, %xmm0 #268.9 | |
pslld $7, %xmm3 #268.9 | |
por %xmm3, %xmm0 #268.9 | |
movdqa %xmm11, %xmm3 #268.9 | |
movdqa %xmm6, %xmm13 #268.9 | |
pand %xmm6, %xmm3 #268.9 | |
pandn %xmm1, %xmm13 #268.9 | |
pxor %xmm0, %xmm9 #268.9 | |
movdqa .L_2il0floatpacket.6786(%rip), %xmm0 #268.9 | |
pxor %xmm13, %xmm3 #268.9 | |
paddd %xmm3, %xmm0 #268.9 | |
movdqa %xmm4, %xmm13 #268.9 | |
movdqa %xmm4, %xmm3 #268.9 | |
paddd %xmm9, %xmm5 #268.9 | |
psrld $2, %xmm13 #268.9 | |
pslld $30, %xmm3 #268.9 | |
paddd %xmm0, %xmm5 #268.9 | |
por %xmm3, %xmm13 #268.9 | |
movdqa %xmm4, %xmm3 #268.9 | |
movdqa %xmm4, %xmm0 #268.9 | |
psrld $13, %xmm3 #268.9 | |
pslld $19, %xmm0 #268.9 | |
por %xmm0, %xmm3 #268.9 | |
movdqa %xmm4, %xmm0 #268.9 | |
pxor %xmm3, %xmm13 #268.9 | |
movdqa %xmm4, %xmm3 #268.9 | |
movdqa %xmm4, %xmm9 #268.9 | |
psrld $22, %xmm0 #268.9 | |
pslld $10, %xmm3 #268.9 | |
pand %xmm12, %xmm9 #268.9 | |
por %xmm3, %xmm0 #268.9 | |
movdqa %xmm2, %xmm3 #268.9 | |
pxor %xmm0, %xmm13 #268.9 | |
pand %xmm4, %xmm3 #268.9 | |
movdqa %xmm9, %xmm0 #268.9 | |
paddd %xmm10, %xmm5 #268.9 | |
pxor %xmm3, %xmm0 #268.9 | |
paddd %xmm5, %xmm7 #268.9 | |
pxor %xmm14, %xmm0 #268.9 | |
movdqa %xmm8, %xmm14 #269.15 | |
paddd %xmm0, %xmm13 #268.9 | |
movdqa %xmm8, %xmm3 #269.15 | |
paddd %xmm13, %xmm5 #268.9 | |
movdqa %xmm8, %xmm13 #269.15 | |
movdqa %xmm8, %xmm0 #269.15 | |
psrld $17, %xmm13 #269.15 | |
pslld $15, %xmm14 #269.15 | |
psrld $19, %xmm3 #269.15 | |
pslld $13, %xmm0 #269.15 | |
por %xmm14, %xmm13 #269.15 | |
por %xmm0, %xmm3 #269.15 | |
movdqa %xmm8, 2320(%rsp) #265.14 | |
pxor %xmm3, %xmm13 #269.15 | |
psrld $10, %xmm8 #269.15 | |
pxor %xmm8, %xmm13 #269.15 | |
movdqa 1744(%rsp), %xmm8 #269.15 | |
movdqa %xmm8, %xmm14 #269.15 | |
movdqa %xmm8, %xmm0 #269.15 | |
movdqa 2048(%rsp), %xmm3 #269.15 | |
psrld $7, %xmm14 #269.15 | |
pslld $25, %xmm0 #269.15 | |
paddd %xmm13, %xmm3 #269.15 | |
por %xmm0, %xmm14 #269.15 | |
movdqa %xmm8, %xmm0 #269.15 | |
movdqa %xmm8, %xmm13 #269.15 | |
psrld $18, %xmm0 #269.15 | |
pslld $14, %xmm13 #269.15 | |
por %xmm13, %xmm0 #269.15 | |
movdqa %xmm8, %xmm13 #269.15 | |
pxor %xmm0, %xmm14 #269.15 | |
psrld $3, %xmm13 #269.15 | |
pxor %xmm13, %xmm14 #269.15 | |
movdqa %xmm7, %xmm0 #270.9 | |
paddd %xmm14, %xmm15 #269.15 | |
movdqa %xmm7, %xmm13 #270.9 | |
paddd %xmm15, %xmm3 #269.15 | |
movdqa %xmm7, %xmm14 #270.9 | |
movdqa %xmm7, %xmm15 #270.9 | |
psrld $6, %xmm0 #270.9 | |
pslld $26, %xmm13 #270.9 | |
psrld $11, %xmm14 #270.9 | |
pslld $21, %xmm15 #270.9 | |
por %xmm13, %xmm0 #270.9 | |
por %xmm15, %xmm14 #270.9 | |
movdqa %xmm7, %xmm13 #270.9 | |
movdqa %xmm7, %xmm15 #270.9 | |
psrld $25, %xmm13 #270.9 | |
pslld $7, %xmm15 #270.9 | |
pxor %xmm14, %xmm0 #270.9 | |
por %xmm15, %xmm13 #270.9 | |
movdqa %xmm7, %xmm15 #270.9 | |
pxor %xmm13, %xmm0 #270.9 | |
movdqa %xmm7, %xmm13 #270.9 | |
pand %xmm6, %xmm15 #270.9 | |
pandn %xmm11, %xmm13 #270.9 | |
movdqa .L_2il0floatpacket.6787(%rip), %xmm14 #270.9 | |
pxor %xmm13, %xmm15 #270.9 | |
paddd %xmm0, %xmm1 #270.9 | |
paddd %xmm15, %xmm14 #270.9 | |
paddd %xmm14, %xmm1 #270.9 | |
movdqa %xmm5, %xmm14 #270.9 | |
movdqa %xmm5, %xmm13 #270.9 | |
psrld $2, %xmm14 #270.9 | |
pslld $30, %xmm13 #270.9 | |
movdqa %xmm5, %xmm15 #270.9 | |
por %xmm13, %xmm14 #270.9 | |
movdqa %xmm5, %xmm13 #270.9 | |
psrld $13, %xmm15 #270.9 | |
pslld $19, %xmm13 #270.9 | |
por %xmm13, %xmm15 #270.9 | |
movdqa %xmm5, %xmm13 #270.9 | |
pxor %xmm15, %xmm14 #270.9 | |
movdqa %xmm5, %xmm15 #270.9 | |
movdqa %xmm5, %xmm0 #270.9 | |
psrld $22, %xmm13 #270.9 | |
pslld $10, %xmm15 #270.9 | |
pand %xmm4, %xmm0 #270.9 | |
por %xmm15, %xmm13 #270.9 | |
movdqa %xmm5, %xmm15 #270.9 | |
pxor %xmm13, %xmm14 #270.9 | |
pand %xmm12, %xmm15 #270.9 | |
movdqa %xmm0, %xmm13 #270.9 | |
paddd %xmm3, %xmm1 #270.9 | |
pxor %xmm15, %xmm13 #270.9 | |
paddd %xmm1, %xmm2 #270.9 | |
pxor %xmm9, %xmm13 #270.9 | |
movdqa %xmm10, %xmm9 #271.15 | |
paddd %xmm13, %xmm14 #270.9 | |
movdqa %xmm10, %xmm13 #271.15 | |
paddd %xmm14, %xmm1 #270.9 | |
movdqa %xmm10, %xmm15 #271.15 | |
movdqa %xmm10, %xmm14 #271.15 | |
psrld $17, %xmm13 #271.15 | |
pslld $15, %xmm9 #271.15 | |
psrld $19, %xmm15 #271.15 | |
pslld $13, %xmm14 #271.15 | |
por %xmm9, %xmm13 #271.15 | |
por %xmm14, %xmm15 #271.15 | |
movdqa %xmm10, 2336(%rsp) #267.14 | |
pxor %xmm15, %xmm13 #271.15 | |
psrld $10, %xmm10 #271.15 | |
movdqa 2064(%rsp), %xmm15 #271.15 | |
pxor %xmm10, %xmm13 #271.15 | |
paddd %xmm13, %xmm15 #271.15 | |
movdqa 1760(%rsp), %xmm13 #271.15 | |
movdqa %xmm13, %xmm14 #271.15 | |
movdqa %xmm13, %xmm9 #271.15 | |
psrld $7, %xmm14 #271.15 | |
pslld $25, %xmm9 #271.15 | |
por %xmm9, %xmm14 #271.15 | |
movdqa %xmm13, %xmm9 #271.15 | |
movdqa %xmm13, %xmm10 #271.15 | |
psrld $18, %xmm9 #271.15 | |
pslld $14, %xmm10 #271.15 | |
por %xmm10, %xmm9 #271.15 | |
movdqa %xmm13, %xmm10 #271.15 | |
pxor %xmm9, %xmm14 #271.15 | |
psrld $3, %xmm10 #271.15 | |
pxor %xmm10, %xmm14 #271.15 | |
movdqa %xmm2, %xmm10 #272.9 | |
paddd %xmm14, %xmm8 #271.15 | |
movdqa %xmm2, %xmm14 #272.9 | |
paddd %xmm8, %xmm15 #271.15 | |
movdqa %xmm2, %xmm9 #272.9 | |
movdqa %xmm2, %xmm8 #272.9 | |
psrld $6, %xmm14 #272.9 | |
pslld $26, %xmm10 #272.9 | |
psrld $11, %xmm9 #272.9 | |
pslld $21, %xmm8 #272.9 | |
por %xmm10, %xmm14 #272.9 | |
por %xmm8, %xmm9 #272.9 | |
movdqa %xmm2, %xmm10 #272.9 | |
movdqa %xmm2, %xmm8 #272.9 | |
psrld $25, %xmm10 #272.9 | |
pslld $7, %xmm8 #272.9 | |
pxor %xmm9, %xmm14 #272.9 | |
por %xmm8, %xmm10 #272.9 | |
movdqa %xmm2, %xmm8 #272.9 | |
pxor %xmm10, %xmm14 #272.9 | |
movdqa %xmm2, %xmm10 #272.9 | |
pand %xmm7, %xmm8 #272.9 | |
pandn %xmm6, %xmm10 #272.9 | |
movdqa .L_2il0floatpacket.6788(%rip), %xmm9 #272.9 | |
pxor %xmm10, %xmm8 #272.9 | |
paddd %xmm8, %xmm9 #272.9 | |
movdqa %xmm1, %xmm8 #272.9 | |
movdqa %xmm1, %xmm10 #272.9 | |
paddd %xmm14, %xmm11 #272.9 | |
psrld $2, %xmm8 #272.9 | |
pslld $30, %xmm10 #272.9 | |
paddd %xmm9, %xmm11 #272.9 | |
por %xmm10, %xmm8 #272.9 | |
movdqa %xmm1, %xmm10 #272.9 | |
movdqa %xmm1, %xmm9 #272.9 | |
psrld $13, %xmm10 #272.9 | |
pslld $19, %xmm9 #272.9 | |
por %xmm9, %xmm10 #272.9 | |
movdqa %xmm1, %xmm9 #272.9 | |
pxor %xmm10, %xmm8 #272.9 | |
movdqa %xmm1, %xmm10 #272.9 | |
movdqa %xmm1, %xmm14 #272.9 | |
psrld $22, %xmm9 #272.9 | |
pslld $10, %xmm10 #272.9 | |
pand %xmm5, %xmm14 #272.9 | |
por %xmm10, %xmm9 #272.9 | |
movdqa %xmm1, %xmm10 #272.9 | |
pxor %xmm9, %xmm8 #272.9 | |
pand %xmm4, %xmm10 #272.9 | |
movdqa %xmm14, %xmm9 #272.9 | |
paddd %xmm15, %xmm11 #272.9 | |
pxor %xmm10, %xmm9 #272.9 | |
paddd %xmm11, %xmm12 #272.9 | |
pxor %xmm0, %xmm9 #272.9 | |
movdqa %xmm3, %xmm0 #273.15 | |
paddd %xmm9, %xmm8 #272.9 | |
movdqa %xmm3, %xmm10 #273.15 | |
paddd %xmm8, %xmm11 #272.9 | |
movdqa %xmm3, %xmm9 #273.15 | |
movdqa %xmm3, %xmm8 #273.15 | |
psrld $17, %xmm0 #273.15 | |
pslld $15, %xmm10 #273.15 | |
psrld $19, %xmm9 #273.15 | |
pslld $13, %xmm8 #273.15 | |
por %xmm10, %xmm0 #273.15 | |
por %xmm8, %xmm9 #273.15 | |
movdqa %xmm3, 2352(%rsp) #269.15 | |
pxor %xmm9, %xmm0 #273.15 | |
psrld $10, %xmm3 #273.15 | |
pxor %xmm3, %xmm0 #273.15 | |
movdqa 2112(%rsp), %xmm3 #273.15 | |
paddd %xmm0, %xmm3 #273.15 | |
movdqa 1776(%rsp), %xmm0 #273.15 | |
movdqa %xmm0, %xmm10 #273.15 | |
movdqa %xmm0, %xmm8 #273.15 | |
psrld $7, %xmm10 #273.15 | |
pslld $25, %xmm8 #273.15 | |
por %xmm8, %xmm10 #273.15 | |
movdqa %xmm0, %xmm8 #273.15 | |
movdqa %xmm0, %xmm9 #273.15 | |
psrld $18, %xmm8 #273.15 | |
pslld $14, %xmm9 #273.15 | |
por %xmm9, %xmm8 #273.15 | |
movdqa %xmm0, %xmm9 #273.15 | |
pxor %xmm8, %xmm10 #273.15 | |
psrld $3, %xmm9 #273.15 | |
pxor %xmm9, %xmm10 #273.15 | |
movdqa %xmm12, %xmm8 #274.9 | |
paddd %xmm10, %xmm13 #273.15 | |
movdqa %xmm12, %xmm10 #274.9 | |
paddd %xmm13, %xmm3 #273.15 | |
movdqa %xmm12, %xmm13 #274.9 | |
movdqa %xmm12, %xmm9 #274.9 | |
psrld $6, %xmm13 #274.9 | |
pslld $26, %xmm8 #274.9 | |
psrld $11, %xmm10 #274.9 | |
pslld $21, %xmm9 #274.9 | |
por %xmm8, %xmm13 #274.9 | |
por %xmm9, %xmm10 #274.9 | |
movdqa %xmm12, %xmm8 #274.9 | |
pxor %xmm10, %xmm13 #274.9 | |
movdqa %xmm12, %xmm10 #274.9 | |
psrld $25, %xmm8 #274.9 | |
pslld $7, %xmm10 #274.9 | |
por %xmm10, %xmm8 #274.9 | |
movdqa %xmm12, %xmm10 #274.9 | |
pxor %xmm8, %xmm13 #274.9 | |
movdqa %xmm12, %xmm8 #274.9 | |
pand %xmm2, %xmm8 #274.9 | |
pandn %xmm7, %xmm10 #274.9 | |
movdqa .L_2il0floatpacket.6789(%rip), %xmm9 #274.9 | |
pxor %xmm10, %xmm8 #274.9 | |
paddd %xmm8, %xmm9 #274.9 | |
movdqa %xmm11, %xmm10 #274.9 | |
movdqa %xmm11, %xmm8 #274.9 | |
paddd %xmm13, %xmm6 #274.9 | |
psrld $2, %xmm10 #274.9 | |
pslld $30, %xmm8 #274.9 | |
paddd %xmm9, %xmm6 #274.9 | |
por %xmm8, %xmm10 #274.9 | |
movdqa %xmm11, %xmm8 #274.9 | |
movdqa %xmm11, %xmm9 #274.9 | |
psrld $13, %xmm8 #274.9 | |
pslld $19, %xmm9 #274.9 | |
por %xmm9, %xmm8 #274.9 | |
movdqa %xmm11, %xmm9 #274.9 | |
pxor %xmm8, %xmm10 #274.9 | |
movdqa %xmm11, %xmm8 #274.9 | |
movdqa %xmm11, %xmm13 #274.9 | |
psrld $22, %xmm8 #274.9 | |
pslld $10, %xmm9 #274.9 | |
pand %xmm1, %xmm13 #274.9 | |
por %xmm9, %xmm8 #274.9 | |
movdqa %xmm11, %xmm9 #274.9 | |
pxor %xmm8, %xmm10 #274.9 | |
pand %xmm5, %xmm9 #274.9 | |
movdqa %xmm13, %xmm8 #274.9 | |
paddd %xmm3, %xmm6 #274.9 | |
pxor %xmm9, %xmm8 #274.9 | |
paddd %xmm6, %xmm4 #274.9 | |
pxor %xmm14, %xmm8 #274.9 | |
movdqa %xmm15, %xmm14 #275.15 | |
paddd %xmm8, %xmm10 #274.9 | |
movdqa %xmm15, %xmm8 #275.15 | |
paddd %xmm10, %xmm6 #274.9 | |
movdqa %xmm15, %xmm10 #275.15 | |
movdqa %xmm15, %xmm9 #275.15 | |
psrld $17, %xmm10 #275.15 | |
pslld $15, %xmm14 #275.15 | |
psrld $19, %xmm8 #275.15 | |
pslld $13, %xmm9 #275.15 | |
por %xmm14, %xmm10 #275.15 | |
por %xmm9, %xmm8 #275.15 | |
movdqa %xmm15, 2368(%rsp) #271.15 | |
pxor %xmm8, %xmm10 #275.15 | |
psrld $10, %xmm15 #275.15 | |
pxor %xmm15, %xmm10 #275.15 | |
movdqa 1824(%rsp), %xmm15 #275.15 | |
movdqa %xmm15, %xmm14 #275.15 | |
movdqa %xmm15, %xmm9 #275.15 | |
movdqa 2160(%rsp), %xmm8 #275.15 | |
psrld $7, %xmm14 #275.15 | |
pslld $25, %xmm9 #275.15 | |
paddd %xmm10, %xmm8 #275.15 | |
por %xmm9, %xmm14 #275.15 | |
movdqa %xmm15, %xmm10 #275.15 | |
movdqa %xmm15, %xmm9 #275.15 | |
psrld $18, %xmm10 #275.15 | |
pslld $14, %xmm9 #275.15 | |
por %xmm9, %xmm10 #275.15 | |
movdqa %xmm15, %xmm9 #275.15 | |
pxor %xmm10, %xmm14 #275.15 | |
psrld $3, %xmm9 #275.15 | |
pxor %xmm9, %xmm14 #275.15 | |
movdqa %xmm4, %xmm9 #276.9 | |
paddd %xmm14, %xmm0 #275.15 | |
movdqa %xmm4, %xmm14 #276.9 | |
paddd %xmm0, %xmm8 #275.15 | |
movdqa %xmm4, %xmm10 #276.9 | |
movdqa %xmm4, %xmm0 #276.9 | |
psrld $6, %xmm14 #276.9 | |
pslld $26, %xmm9 #276.9 | |
psrld $11, %xmm10 #276.9 | |
pslld $21, %xmm0 #276.9 | |
por %xmm9, %xmm14 #276.9 | |
por %xmm0, %xmm10 #276.9 | |
movdqa %xmm4, %xmm9 #276.9 | |
movdqa %xmm4, %xmm0 #276.9 | |
psrld $25, %xmm9 #276.9 | |
pslld $7, %xmm0 #276.9 | |
pxor %xmm10, %xmm14 #276.9 | |
por %xmm0, %xmm9 #276.9 | |
movdqa %xmm4, %xmm0 #276.9 | |
pxor %xmm9, %xmm14 #276.9 | |
movdqa %xmm4, %xmm9 #276.9 | |
pand %xmm12, %xmm0 #276.9 | |
pandn %xmm2, %xmm9 #276.9 | |
movdqa .L_2il0floatpacket.6790(%rip), %xmm10 #276.9 | |
pxor %xmm9, %xmm0 #276.9 | |
paddd %xmm14, %xmm7 #276.9 | |
paddd %xmm0, %xmm10 #276.9 | |
paddd %xmm10, %xmm7 #276.9 | |
movdqa %xmm6, %xmm0 #276.9 | |
movdqa %xmm11, 2400(%rsp) #272.9 | |
paddd %xmm8, %xmm7 #276.9 | |
movdqa %xmm8, 2464(%rsp) #275.15 | |
pand %xmm11, %xmm0 #276.9 | |
movdqa %xmm6, %xmm11 #276.9 | |
movdqa %xmm6, %xmm8 #276.9 | |
movdqa %xmm6, %xmm9 #276.9 | |
movdqa %xmm6, %xmm10 #276.9 | |
psrld $2, %xmm11 #276.9 | |
pslld $30, %xmm8 #276.9 | |
psrld $13, %xmm9 #276.9 | |
pslld $19, %xmm10 #276.9 | |
por %xmm8, %xmm11 #276.9 | |
por %xmm10, %xmm9 #276.9 | |
pxor %xmm9, %xmm11 #276.9 | |
movdqa %xmm6, %xmm14 #276.9 | |
movdqa %xmm6, %xmm9 #276.9 | |
movdqa %xmm6, %xmm8 #276.9 | |
psrld $22, %xmm14 #276.9 | |
pslld $10, %xmm9 #276.9 | |
pand %xmm1, %xmm8 #276.9 | |
por %xmm9, %xmm14 #276.9 | |
movdqa %xmm0, 2496(%rsp) #276.9 | |
pxor %xmm8, %xmm0 #276.9 | |
pxor %xmm14, %xmm11 #276.9 | |
pxor %xmm13, %xmm0 #276.9 | |
paddd %xmm0, %xmm11 #276.9 | |
movdqa %xmm3, %xmm13 #277.15 | |
movdqa %xmm3, %xmm0 #277.15 | |
movdqa %xmm3, %xmm8 #277.15 | |
movdqa %xmm3, %xmm9 #277.15 | |
psrld $17, %xmm13 #277.15 | |
pslld $15, %xmm0 #277.15 | |
psrld $19, %xmm8 #277.15 | |
pslld $13, %xmm9 #277.15 | |
por %xmm0, %xmm13 #277.15 | |
por %xmm9, %xmm8 #277.15 | |
paddd %xmm7, %xmm5 #276.9 | |
movdqa %xmm3, 2416(%rsp) #273.15 | |
pxor %xmm8, %xmm13 #277.15 | |
psrld $10, %xmm3 #277.15 | |
paddd %xmm11, %xmm7 #276.9 | |
pxor %xmm3, %xmm13 #277.15 | |
movdqa %xmm5, %xmm11 #278.9 | |
movdqa 2224(%rsp), %xmm3 #277.15 | |
movdqa %xmm5, %xmm9 #278.9 | |
paddd %xmm13, %xmm3 #277.15 | |
psrld $6, %xmm11 #278.9 | |
movdqa 1872(%rsp), %xmm13 #277.15 | |
pslld $21, %xmm9 #278.9 | |
movdqa %xmm13, %xmm10 #277.15 | |
movdqa %xmm13, %xmm14 #277.15 | |
movdqa %xmm13, %xmm0 #277.15 | |
movdqa %xmm13, %xmm8 #277.15 | |
psrld $7, %xmm10 #277.15 | |
pslld $25, %xmm14 #277.15 | |
psrld $18, %xmm0 #277.15 | |
pslld $14, %xmm8 #277.15 | |
por %xmm14, %xmm10 #277.15 | |
por %xmm8, %xmm0 #277.15 | |
pxor %xmm0, %xmm10 #277.15 | |
psrld $3, %xmm13 #277.15 | |
pxor %xmm13, %xmm10 #277.15 | |
movdqa %xmm5, %xmm0 #278.9 | |
movdqa %xmm5, %xmm8 #278.9 | |
paddd %xmm10, %xmm15 #277.15 | |
pslld $26, %xmm0 #278.9 | |
psrld $11, %xmm8 #278.9 | |
movdqa %xmm5, %xmm10 #278.9 | |
movdqa %xmm5, %xmm13 #278.9 | |
por %xmm0, %xmm11 #278.9 | |
por %xmm9, %xmm8 #278.9 | |
psrld $25, %xmm10 #278.9 | |
pslld $7, %xmm13 #278.9 | |
movdqa %xmm5, %xmm0 #278.9 | |
pxor %xmm8, %xmm11 #278.9 | |
movdqa %xmm5, 2480(%rsp) #276.9 | |
por %xmm13, %xmm10 #278.9 | |
pand %xmm4, %xmm0 #278.9 | |
pandn %xmm12, %xmm5 #278.9 | |
movdqa %xmm4, 2432(%rsp) #274.9 | |
pxor %xmm10, %xmm11 #278.9 | |
movdqa .L_2il0floatpacket.6791(%rip), %xmm4 #278.9 | |
pxor %xmm5, %xmm0 #278.9 | |
paddd %xmm11, %xmm2 #278.9 | |
paddd %xmm0, %xmm4 #278.9 | |
paddd %xmm15, %xmm3 #277.15 | |
paddd %xmm4, %xmm2 #278.9 | |
movdqa %xmm7, %xmm0 #278.9 | |
paddd %xmm3, %xmm2 #278.9 | |
movdqa %xmm6, 2448(%rsp) #274.9 | |
pand %xmm6, %xmm0 #278.9 | |
movdqa %xmm3, 2528(%rsp) #277.15 | |
movdqa %xmm7, %xmm3 #278.9 | |
movdqa %xmm7, %xmm5 #278.9 | |
movdqa %xmm7, %xmm6 #278.9 | |
movdqa %xmm7, %xmm8 #278.9 | |
psrld $2, %xmm3 #278.9 | |
pslld $30, %xmm5 #278.9 | |
psrld $13, %xmm6 #278.9 | |
pslld $19, %xmm8 #278.9 | |
por %xmm5, %xmm3 #278.9 | |
por %xmm8, %xmm6 #278.9 | |
movdqa %xmm7, %xmm9 #278.9 | |
movdqa %xmm7, 2512(%rsp) #276.9 | |
paddd %xmm2, %xmm1 #278.9 | |
pxor %xmm6, %xmm3 #278.9 | |
psrld $22, %xmm9 #278.9 | |
pslld $10, %xmm7 #278.9 | |
movdqa %xmm12, 2384(%rsp) #272.9 | |
movdqa %xmm2, 2544(%rsp) #278.9 | |
movdqa %xmm1, 2560(%rsp) #278.9 | |
movdqa %xmm0, 2576(%rsp) #278.9 | |
movdqa %xmm3, 2592(%rsp) #278.9 | |
movdqa %xmm9, 2608(%rsp) #278.9 | |
movdqa %xmm7, 2624(%rsp) #278.9 | |
# LOE | |
..B2.17: # Preds ..B2.18 | |
movdqa 2512(%rsp), %xmm2 #278.9 | |
movdqa 2400(%rsp), %xmm6 #278.9 | |
movdqa %xmm2, %xmm7 #278.9 | |
movdqa 2576(%rsp), %xmm3 #278.9 | |
pand %xmm6, %xmm7 #278.9 | |
movdqa 2608(%rsp), %xmm12 #278.9 | |
movdqa %xmm3, %xmm1 #278.9 | |
por 2624(%rsp), %xmm12 #278.9 | |
pxor %xmm7, %xmm1 #278.9 | |
movdqa 2592(%rsp), %xmm4 #278.9 | |
movdqa 2496(%rsp), %xmm5 #278.9 | |
pxor %xmm12, %xmm4 #278.9 | |
movdqa 2464(%rsp), %xmm14 #279.15 | |
pxor %xmm1, %xmm5 #278.9 | |
movdqa %xmm14, %xmm15 #279.15 | |
movdqa %xmm14, %xmm13 #279.15 | |
movdqa %xmm14, %xmm0 #279.15 | |
movdqa %xmm14, %xmm11 #279.15 | |
movdqa 2544(%rsp), %xmm12 #278.9 | |
paddd %xmm5, %xmm4 #278.9 | |
movdqa 1936(%rsp), %xmm9 #279.15 | |
psrld $17, %xmm15 #279.15 | |
pslld $15, %xmm13 #279.15 | |
psrld $19, %xmm0 #279.15 | |
pslld $13, %xmm11 #279.15 | |
paddd %xmm4, %xmm12 #278.9 | |
por %xmm13, %xmm15 #279.15 | |
por %xmm11, %xmm0 #279.15 | |
movdqa %xmm9, %xmm10 #279.15 | |
movdqa %xmm9, %xmm7 #279.15 | |
movdqa %xmm9, %xmm1 #279.15 | |
movdqa %xmm9, %xmm4 #279.15 | |
movdqa 2560(%rsp), %xmm11 #280.9 | |
pxor %xmm0, %xmm15 #279.15 | |
psrld $10, %xmm14 #279.15 | |
psrld $7, %xmm10 #279.15 | |
pslld $25, %xmm7 #279.15 | |
psrld $18, %xmm1 #279.15 | |
pslld $14, %xmm4 #279.15 | |
pxor %xmm14, %xmm15 #279.15 | |
por %xmm7, %xmm10 #279.15 | |
por %xmm4, %xmm1 #279.15 | |
movdqa %xmm9, %xmm5 #279.15 | |
movdqa %xmm11, %xmm13 #280.9 | |
movdqa %xmm11, %xmm0 #280.9 | |
movdqa %xmm11, %xmm14 #280.9 | |
movdqa %xmm11, %xmm7 #280.9 | |
pxor %xmm1, %xmm10 #279.15 | |
movdqa 2320(%rsp), %xmm8 #279.15 | |
psrld $3, %xmm5 #279.15 | |
psrld $6, %xmm13 #280.9 | |
pslld $26, %xmm0 #280.9 | |
psrld $11, %xmm14 #280.9 | |
pslld $21, %xmm7 #280.9 | |
movdqa %xmm11, %xmm1 #280.9 | |
movdqa %xmm11, %xmm4 #280.9 | |
paddd %xmm15, %xmm8 #279.15 | |
pxor %xmm5, %xmm10 #279.15 | |
movdqa 1872(%rsp), %xmm15 #279.15 | |
por %xmm0, %xmm13 #280.9 | |
por %xmm7, %xmm14 #280.9 | |
psrld $25, %xmm1 #280.9 | |
pslld $7, %xmm4 #280.9 | |
paddd %xmm10, %xmm15 #279.15 | |
pxor %xmm14, %xmm13 #280.9 | |
por %xmm4, %xmm1 #280.9 | |
movdqa 2432(%rsp), %xmm5 #280.9 | |
paddd %xmm15, %xmm8 #279.15 | |
pxor %xmm1, %xmm13 #280.9 | |
movdqa %xmm11, %xmm15 #280.9 | |
movdqa 2480(%rsp), %xmm1 #280.9 | |
movdqa %xmm11, %xmm0 #280.9 | |
pand %xmm1, %xmm15 #280.9 | |
pandn %xmm5, %xmm0 #280.9 | |
movdqa 2384(%rsp), %xmm4 #280.9 | |
pxor %xmm0, %xmm15 #280.9 | |
movdqa .L_2il0floatpacket.6792(%rip), %xmm10 #280.9 | |
movdqa %xmm12, %xmm0 #280.9 | |
movdqa %xmm12, %xmm7 #280.9 | |
paddd %xmm13, %xmm4 #280.9 | |
paddd %xmm15, %xmm10 #280.9 | |
psrld $2, %xmm0 #280.9 | |
pslld $30, %xmm7 #280.9 | |
movdqa %xmm12, %xmm15 #280.9 | |
movdqa %xmm12, %xmm13 #280.9 | |
paddd %xmm10, %xmm4 #280.9 | |
por %xmm7, %xmm0 #280.9 | |
psrld $13, %xmm15 #280.9 | |
pslld $19, %xmm13 #280.9 | |
movdqa %xmm12, %xmm7 #280.9 | |
movdqa %xmm12, %xmm10 #280.9 | |
por %xmm13, %xmm15 #280.9 | |
psrld $22, %xmm7 #280.9 | |
pslld $10, %xmm10 #280.9 | |
pxor %xmm15, %xmm0 #280.9 | |
por %xmm10, %xmm7 #280.9 | |
movdqa %xmm2, %xmm14 #280.9 | |
pxor %xmm7, %xmm0 #280.9 | |
movdqa 2448(%rsp), %xmm7 #280.9 | |
pand %xmm12, %xmm14 #280.9 | |
movdqa %xmm7, %xmm15 #280.9 | |
movdqa %xmm14, %xmm13 #280.9 | |
pand %xmm12, %xmm15 #280.9 | |
paddd %xmm8, %xmm4 #280.9 | |
pxor %xmm15, %xmm13 #280.9 | |
paddd %xmm4, %xmm6 #280.9 | |
pxor %xmm13, %xmm3 #280.9 | |
movdqa 2528(%rsp), %xmm15 #282.14 | |
paddd %xmm3, %xmm0 #280.9 | |
paddd %xmm0, %xmm4 #280.9 | |
movdqa %xmm15, %xmm3 #282.14 | |
movdqa %xmm15, %xmm10 #282.14 | |
movdqa %xmm15, %xmm13 #282.14 | |
movdqa %xmm15, %xmm0 #282.14 | |
psrld $17, %xmm3 #282.14 | |
pslld $15, %xmm10 #282.14 | |
psrld $19, %xmm13 #282.14 | |
pslld $13, %xmm0 #282.14 | |
por %xmm10, %xmm3 #282.14 | |
por %xmm0, %xmm13 #282.14 | |
psrld $10, %xmm15 #282.14 | |
pxor %xmm13, %xmm3 #282.14 | |
pxor %xmm15, %xmm3 #282.14 | |
movdqa 2016(%rsp), %xmm15 #282.14 | |
movdqa %xmm15, %xmm13 #282.14 | |
movdqa %xmm15, %xmm0 #282.14 | |
movdqa 2336(%rsp), %xmm10 #282.14 | |
psrld $7, %xmm13 #282.14 | |
pslld $25, %xmm0 #282.14 | |
paddd %xmm3, %xmm10 #282.14 | |
por %xmm0, %xmm13 #282.14 | |
movdqa %xmm15, %xmm0 #282.14 | |
movdqa %xmm15, %xmm3 #282.14 | |
psrld $18, %xmm0 #282.14 | |
pslld $14, %xmm3 #282.14 | |
por %xmm3, %xmm0 #282.14 | |
movdqa %xmm15, %xmm3 #282.14 | |
pxor %xmm0, %xmm13 #282.14 | |
psrld $3, %xmm3 #282.14 | |
pxor %xmm3, %xmm13 #282.14 | |
movdqa %xmm6, %xmm0 #283.9 | |
paddd %xmm13, %xmm9 #282.14 | |
movdqa %xmm6, %xmm3 #283.9 | |
paddd %xmm9, %xmm10 #282.14 | |
movdqa %xmm6, %xmm9 #283.9 | |
movdqa %xmm6, %xmm13 #283.9 | |
psrld $6, %xmm9 #283.9 | |
pslld $26, %xmm0 #283.9 | |
psrld $11, %xmm3 #283.9 | |
pslld $21, %xmm13 #283.9 | |
por %xmm0, %xmm9 #283.9 | |
por %xmm13, %xmm3 #283.9 | |
movdqa %xmm6, %xmm0 #283.9 | |
pxor %xmm3, %xmm9 #283.9 | |
movdqa %xmm6, %xmm3 #283.9 | |
psrld $25, %xmm0 #283.9 | |
pslld $7, %xmm3 #283.9 | |
por %xmm3, %xmm0 #283.9 | |
movdqa %xmm11, %xmm3 #283.9 | |
movdqa %xmm6, %xmm13 #283.9 | |
pand %xmm6, %xmm3 #283.9 | |
pandn %xmm1, %xmm13 #283.9 | |
pxor %xmm0, %xmm9 #283.9 | |
movdqa .L_2il0floatpacket.6793(%rip), %xmm0 #283.9 | |
pxor %xmm13, %xmm3 #283.9 | |
paddd %xmm3, %xmm0 #283.9 | |
movdqa %xmm4, %xmm13 #283.9 | |
movdqa %xmm4, %xmm3 #283.9 | |
paddd %xmm9, %xmm5 #283.9 | |
psrld $2, %xmm13 #283.9 | |
pslld $30, %xmm3 #283.9 | |
paddd %xmm0, %xmm5 #283.9 | |
por %xmm3, %xmm13 #283.9 | |
movdqa %xmm4, %xmm3 #283.9 | |
movdqa %xmm4, %xmm0 #283.9 | |
psrld $13, %xmm3 #283.9 | |
pslld $19, %xmm0 #283.9 | |
por %xmm0, %xmm3 #283.9 | |
movdqa %xmm4, %xmm0 #283.9 | |
pxor %xmm3, %xmm13 #283.9 | |
movdqa %xmm4, %xmm3 #283.9 | |
movdqa %xmm4, %xmm9 #283.9 | |
psrld $22, %xmm0 #283.9 | |
pslld $10, %xmm3 #283.9 | |
pand %xmm12, %xmm9 #283.9 | |
por %xmm3, %xmm0 #283.9 | |
movdqa %xmm2, %xmm3 #283.9 | |
pxor %xmm0, %xmm13 #283.9 | |
pand %xmm4, %xmm3 #283.9 | |
movdqa %xmm9, %xmm0 #283.9 | |
paddd %xmm10, %xmm5 #283.9 | |
pxor %xmm3, %xmm0 #283.9 | |
paddd %xmm5, %xmm7 #283.9 | |
pxor %xmm14, %xmm0 #283.9 | |
movdqa %xmm8, %xmm14 #284.14 | |
paddd %xmm0, %xmm13 #283.9 | |
movdqa %xmm8, %xmm3 #284.14 | |
paddd %xmm13, %xmm5 #283.9 | |
movdqa %xmm8, %xmm13 #284.14 | |
movdqa %xmm8, %xmm0 #284.14 | |
psrld $17, %xmm13 #284.14 | |
pslld $15, %xmm14 #284.14 | |
psrld $19, %xmm3 #284.14 | |
pslld $13, %xmm0 #284.14 | |
por %xmm14, %xmm13 #284.14 | |
por %xmm0, %xmm3 #284.14 | |
movdqa %xmm8, 2640(%rsp) #279.15 | |
pxor %xmm3, %xmm13 #284.14 | |
psrld $10, %xmm8 #284.14 | |
pxor %xmm8, %xmm13 #284.14 | |
movdqa 2032(%rsp), %xmm8 #284.14 | |
movdqa %xmm8, %xmm14 #284.14 | |
movdqa %xmm8, %xmm0 #284.14 | |
movdqa 2352(%rsp), %xmm3 #284.14 | |
psrld $7, %xmm14 #284.14 | |
pslld $25, %xmm0 #284.14 | |
paddd %xmm13, %xmm3 #284.14 | |
por %xmm0, %xmm14 #284.14 | |
movdqa %xmm8, %xmm0 #284.14 | |
movdqa %xmm8, %xmm13 #284.14 | |
psrld $18, %xmm0 #284.14 | |
pslld $14, %xmm13 #284.14 | |
por %xmm13, %xmm0 #284.14 | |
movdqa %xmm8, %xmm13 #284.14 | |
pxor %xmm0, %xmm14 #284.14 | |
psrld $3, %xmm13 #284.14 | |
pxor %xmm13, %xmm14 #284.14 | |
movdqa %xmm7, %xmm0 #285.9 | |
paddd %xmm14, %xmm15 #284.14 | |
movdqa %xmm7, %xmm13 #285.9 | |
paddd %xmm15, %xmm3 #284.14 | |
movdqa %xmm7, %xmm14 #285.9 | |
movdqa %xmm7, %xmm15 #285.9 | |
psrld $6, %xmm0 #285.9 | |
pslld $26, %xmm13 #285.9 | |
psrld $11, %xmm14 #285.9 | |
pslld $21, %xmm15 #285.9 | |
por %xmm13, %xmm0 #285.9 | |
por %xmm15, %xmm14 #285.9 | |
movdqa %xmm7, %xmm13 #285.9 | |
movdqa %xmm7, %xmm15 #285.9 | |
psrld $25, %xmm13 #285.9 | |
pslld $7, %xmm15 #285.9 | |
pxor %xmm14, %xmm0 #285.9 | |
por %xmm15, %xmm13 #285.9 | |
movdqa %xmm7, %xmm15 #285.9 | |
pxor %xmm13, %xmm0 #285.9 | |
movdqa %xmm7, %xmm13 #285.9 | |
pand %xmm6, %xmm15 #285.9 | |
pandn %xmm11, %xmm13 #285.9 | |
movdqa .L_2il0floatpacket.6794(%rip), %xmm14 #285.9 | |
pxor %xmm13, %xmm15 #285.9 | |
paddd %xmm0, %xmm1 #285.9 | |
paddd %xmm15, %xmm14 #285.9 | |
paddd %xmm14, %xmm1 #285.9 | |
movdqa %xmm5, %xmm14 #285.9 | |
movdqa %xmm5, %xmm13 #285.9 | |
psrld $2, %xmm14 #285.9 | |
pslld $30, %xmm13 #285.9 | |
movdqa %xmm5, %xmm15 #285.9 | |
por %xmm13, %xmm14 #285.9 | |
movdqa %xmm5, %xmm13 #285.9 | |
psrld $13, %xmm15 #285.9 | |
pslld $19, %xmm13 #285.9 | |
por %xmm13, %xmm15 #285.9 | |
movdqa %xmm5, %xmm13 #285.9 | |
pxor %xmm15, %xmm14 #285.9 | |
movdqa %xmm5, %xmm15 #285.9 | |
movdqa %xmm5, %xmm0 #285.9 | |
psrld $22, %xmm13 #285.9 | |
pslld $10, %xmm15 #285.9 | |
pand %xmm4, %xmm0 #285.9 | |
por %xmm15, %xmm13 #285.9 | |
movdqa %xmm5, %xmm15 #285.9 | |
pxor %xmm13, %xmm14 #285.9 | |
pand %xmm12, %xmm15 #285.9 | |
movdqa %xmm0, %xmm13 #285.9 | |
paddd %xmm3, %xmm1 #285.9 | |
pxor %xmm15, %xmm13 #285.9 | |
paddd %xmm1, %xmm2 #285.9 | |
pxor %xmm9, %xmm13 #285.9 | |
movdqa %xmm10, %xmm9 #286.14 | |
paddd %xmm13, %xmm14 #285.9 | |
movdqa %xmm10, %xmm13 #286.14 | |
paddd %xmm14, %xmm1 #285.9 | |
movdqa %xmm10, %xmm15 #286.14 | |
movdqa %xmm10, %xmm14 #286.14 | |
psrld $17, %xmm13 #286.14 | |
pslld $15, %xmm9 #286.14 | |
psrld $19, %xmm15 #286.14 | |
pslld $13, %xmm14 #286.14 | |
por %xmm9, %xmm13 #286.14 | |
por %xmm14, %xmm15 #286.14 | |
movdqa %xmm10, 2656(%rsp) #282.14 | |
pxor %xmm15, %xmm13 #286.14 | |
psrld $10, %xmm10 #286.14 | |
movdqa 2368(%rsp), %xmm15 #286.14 | |
pxor %xmm10, %xmm13 #286.14 | |
paddd %xmm13, %xmm15 #286.14 | |
movdqa 2048(%rsp), %xmm13 #286.14 | |
movdqa %xmm13, %xmm14 #286.14 | |
movdqa %xmm13, %xmm9 #286.14 | |
psrld $7, %xmm14 #286.14 | |
pslld $25, %xmm9 #286.14 | |
por %xmm9, %xmm14 #286.14 | |
movdqa %xmm13, %xmm9 #286.14 | |
movdqa %xmm13, %xmm10 #286.14 | |
psrld $18, %xmm9 #286.14 | |
pslld $14, %xmm10 #286.14 | |
por %xmm10, %xmm9 #286.14 | |
movdqa %xmm13, %xmm10 #286.14 | |
pxor %xmm9, %xmm14 #286.14 | |
psrld $3, %xmm10 #286.14 | |
pxor %xmm10, %xmm14 #286.14 | |
movdqa %xmm2, %xmm10 #287.9 | |
paddd %xmm14, %xmm8 #286.14 | |
movdqa %xmm2, %xmm14 #287.9 | |
paddd %xmm8, %xmm15 #286.14 | |
movdqa %xmm2, %xmm9 #287.9 | |
movdqa %xmm2, %xmm8 #287.9 | |
psrld $6, %xmm14 #287.9 | |
pslld $26, %xmm10 #287.9 | |
psrld $11, %xmm9 #287.9 | |
pslld $21, %xmm8 #287.9 | |
por %xmm10, %xmm14 #287.9 | |
por %xmm8, %xmm9 #287.9 | |
movdqa %xmm2, %xmm10 #287.9 | |
movdqa %xmm2, %xmm8 #287.9 | |
psrld $25, %xmm10 #287.9 | |
pslld $7, %xmm8 #287.9 | |
pxor %xmm9, %xmm14 #287.9 | |
por %xmm8, %xmm10 #287.9 | |
movdqa %xmm2, %xmm8 #287.9 | |
pxor %xmm10, %xmm14 #287.9 | |
movdqa %xmm2, %xmm10 #287.9 | |
pand %xmm7, %xmm8 #287.9 | |
pandn %xmm6, %xmm10 #287.9 | |
movdqa .L_2il0floatpacket.6795(%rip), %xmm9 #287.9 | |
pxor %xmm10, %xmm8 #287.9 | |
paddd %xmm8, %xmm9 #287.9 | |
movdqa %xmm1, %xmm8 #287.9 | |
movdqa %xmm1, %xmm10 #287.9 | |
paddd %xmm14, %xmm11 #287.9 | |
psrld $2, %xmm8 #287.9 | |
pslld $30, %xmm10 #287.9 | |
paddd %xmm9, %xmm11 #287.9 | |
por %xmm10, %xmm8 #287.9 | |
movdqa %xmm1, %xmm10 #287.9 | |
movdqa %xmm1, %xmm9 #287.9 | |
psrld $13, %xmm10 #287.9 | |
pslld $19, %xmm9 #287.9 | |
por %xmm9, %xmm10 #287.9 | |
movdqa %xmm1, %xmm9 #287.9 | |
pxor %xmm10, %xmm8 #287.9 | |
movdqa %xmm1, %xmm10 #287.9 | |
movdqa %xmm1, %xmm14 #287.9 | |
psrld $22, %xmm9 #287.9 | |
pslld $10, %xmm10 #287.9 | |
pand %xmm5, %xmm14 #287.9 | |
por %xmm10, %xmm9 #287.9 | |
movdqa %xmm1, %xmm10 #287.9 | |
pxor %xmm9, %xmm8 #287.9 | |
pand %xmm4, %xmm10 #287.9 | |
movdqa %xmm14, %xmm9 #287.9 | |
paddd %xmm15, %xmm11 #287.9 | |
pxor %xmm10, %xmm9 #287.9 | |
paddd %xmm11, %xmm12 #287.9 | |
pxor %xmm0, %xmm9 #287.9 | |
movdqa %xmm3, %xmm0 #288.14 | |
paddd %xmm9, %xmm8 #287.9 | |
movdqa %xmm3, %xmm10 #288.14 | |
paddd %xmm8, %xmm11 #287.9 | |
movdqa %xmm3, %xmm9 #288.14 | |
movdqa %xmm3, %xmm8 #288.14 | |
psrld $17, %xmm0 #288.14 | |
pslld $15, %xmm10 #288.14 | |
psrld $19, %xmm9 #288.14 | |
pslld $13, %xmm8 #288.14 | |
por %xmm10, %xmm0 #288.14 | |
por %xmm8, %xmm9 #288.14 | |
movdqa %xmm3, 2672(%rsp) #284.14 | |
pxor %xmm9, %xmm0 #288.14 | |
psrld $10, %xmm3 #288.14 | |
pxor %xmm3, %xmm0 #288.14 | |
movdqa 2416(%rsp), %xmm3 #288.14 | |
paddd %xmm0, %xmm3 #288.14 | |
movdqa 2064(%rsp), %xmm0 #288.14 | |
movdqa %xmm0, %xmm10 #288.14 | |
movdqa %xmm0, %xmm8 #288.14 | |
psrld $7, %xmm10 #288.14 | |
pslld $25, %xmm8 #288.14 | |
por %xmm8, %xmm10 #288.14 | |
movdqa %xmm0, %xmm8 #288.14 | |
movdqa %xmm0, %xmm9 #288.14 | |
psrld $18, %xmm8 #288.14 | |
pslld $14, %xmm9 #288.14 | |
por %xmm9, %xmm8 #288.14 | |
movdqa %xmm0, %xmm9 #288.14 | |
pxor %xmm8, %xmm10 #288.14 | |
psrld $3, %xmm9 #288.14 | |
pxor %xmm9, %xmm10 #288.14 | |
movdqa %xmm12, %xmm8 #289.9 | |
paddd %xmm10, %xmm13 #288.14 | |
movdqa %xmm12, %xmm10 #289.9 | |
paddd %xmm13, %xmm3 #288.14 | |
movdqa %xmm12, %xmm13 #289.9 | |
movdqa %xmm12, %xmm9 #289.9 | |
psrld $6, %xmm13 #289.9 | |
pslld $26, %xmm8 #289.9 | |
psrld $11, %xmm10 #289.9 | |
pslld $21, %xmm9 #289.9 | |
por %xmm8, %xmm13 #289.9 | |
por %xmm9, %xmm10 #289.9 | |
movdqa %xmm12, %xmm8 #289.9 | |
pxor %xmm10, %xmm13 #289.9 | |
movdqa %xmm12, %xmm10 #289.9 | |
psrld $25, %xmm8 #289.9 | |
pslld $7, %xmm10 #289.9 | |
por %xmm10, %xmm8 #289.9 | |
movdqa %xmm12, %xmm10 #289.9 | |
pxor %xmm8, %xmm13 #289.9 | |
movdqa %xmm12, %xmm8 #289.9 | |
pand %xmm2, %xmm8 #289.9 | |
pandn %xmm7, %xmm10 #289.9 | |
movdqa .L_2il0floatpacket.6796(%rip), %xmm9 #289.9 | |
pxor %xmm10, %xmm8 #289.9 | |
paddd %xmm8, %xmm9 #289.9 | |
movdqa %xmm11, %xmm10 #289.9 | |
movdqa %xmm11, %xmm8 #289.9 | |
paddd %xmm13, %xmm6 #289.9 | |
psrld $2, %xmm10 #289.9 | |
pslld $30, %xmm8 #289.9 | |
paddd %xmm9, %xmm6 #289.9 | |
por %xmm8, %xmm10 #289.9 | |
movdqa %xmm11, %xmm8 #289.9 | |
movdqa %xmm11, %xmm9 #289.9 | |
psrld $13, %xmm8 #289.9 | |
pslld $19, %xmm9 #289.9 | |
por %xmm9, %xmm8 #289.9 | |
movdqa %xmm11, %xmm9 #289.9 | |
pxor %xmm8, %xmm10 #289.9 | |
movdqa %xmm11, %xmm8 #289.9 | |
movdqa %xmm11, %xmm13 #289.9 | |
psrld $22, %xmm8 #289.9 | |
pslld $10, %xmm9 #289.9 | |
pand %xmm1, %xmm13 #289.9 | |
por %xmm9, %xmm8 #289.9 | |
movdqa %xmm11, %xmm9 #289.9 | |
pxor %xmm8, %xmm10 #289.9 | |
pand %xmm5, %xmm9 #289.9 | |
movdqa %xmm13, %xmm8 #289.9 | |
paddd %xmm3, %xmm6 #289.9 | |
pxor %xmm9, %xmm8 #289.9 | |
paddd %xmm6, %xmm4 #289.9 | |
pxor %xmm14, %xmm8 #289.9 | |
movdqa %xmm15, %xmm14 #290.14 | |
paddd %xmm8, %xmm10 #289.9 | |
movdqa %xmm15, %xmm8 #290.14 | |
paddd %xmm10, %xmm6 #289.9 | |
movdqa %xmm15, %xmm10 #290.14 | |
movdqa %xmm15, %xmm9 #290.14 | |
psrld $17, %xmm10 #290.14 | |
pslld $15, %xmm14 #290.14 | |
psrld $19, %xmm8 #290.14 | |
pslld $13, %xmm9 #290.14 | |
por %xmm14, %xmm10 #290.14 | |
por %xmm9, %xmm8 #290.14 | |
movdqa %xmm15, 2688(%rsp) #286.14 | |
pxor %xmm8, %xmm10 #290.14 | |
psrld $10, %xmm15 #290.14 | |
pxor %xmm15, %xmm10 #290.14 | |
movdqa 2112(%rsp), %xmm15 #290.14 | |
movdqa %xmm15, %xmm14 #290.14 | |
movdqa %xmm15, %xmm9 #290.14 | |
movdqa 2464(%rsp), %xmm8 #290.14 | |
psrld $7, %xmm14 #290.14 | |
pslld $25, %xmm9 #290.14 | |
paddd %xmm10, %xmm8 #290.14 | |
por %xmm9, %xmm14 #290.14 | |
movdqa %xmm15, %xmm10 #290.14 | |
movdqa %xmm15, %xmm9 #290.14 | |
psrld $18, %xmm10 #290.14 | |
pslld $14, %xmm9 #290.14 | |
por %xmm9, %xmm10 #290.14 | |
movdqa %xmm15, %xmm9 #290.14 | |
pxor %xmm10, %xmm14 #290.14 | |
psrld $3, %xmm9 #290.14 | |
pxor %xmm9, %xmm14 #290.14 | |
movdqa %xmm4, %xmm9 #291.9 | |
paddd %xmm14, %xmm0 #290.14 | |
movdqa %xmm4, %xmm14 #291.9 | |
paddd %xmm0, %xmm8 #290.14 | |
movdqa %xmm4, %xmm10 #291.9 | |
movdqa %xmm4, %xmm0 #291.9 | |
psrld $6, %xmm14 #291.9 | |
pslld $26, %xmm9 #291.9 | |
psrld $11, %xmm10 #291.9 | |
pslld $21, %xmm0 #291.9 | |
por %xmm9, %xmm14 #291.9 | |
por %xmm0, %xmm10 #291.9 | |
movdqa %xmm4, %xmm9 #291.9 | |
movdqa %xmm4, %xmm0 #291.9 | |
psrld $25, %xmm9 #291.9 | |
pslld $7, %xmm0 #291.9 | |
pxor %xmm10, %xmm14 #291.9 | |
por %xmm0, %xmm9 #291.9 | |
movdqa %xmm4, %xmm0 #291.9 | |
pxor %xmm9, %xmm14 #291.9 | |
movdqa %xmm4, %xmm9 #291.9 | |
pand %xmm12, %xmm0 #291.9 | |
pandn %xmm2, %xmm9 #291.9 | |
movdqa .L_2il0floatpacket.6797(%rip), %xmm10 #291.9 | |
pxor %xmm9, %xmm0 #291.9 | |
paddd %xmm14, %xmm7 #291.9 | |
paddd %xmm0, %xmm10 #291.9 | |
paddd %xmm10, %xmm7 #291.9 | |
movdqa %xmm6, %xmm0 #291.9 | |
movdqa %xmm11, 2720(%rsp) #287.9 | |
paddd %xmm8, %xmm7 #291.9 | |
movdqa %xmm8, 2784(%rsp) #290.14 | |
pand %xmm11, %xmm0 #291.9 | |
movdqa %xmm6, %xmm11 #291.9 | |
movdqa %xmm6, %xmm8 #291.9 | |
movdqa %xmm6, %xmm9 #291.9 | |
movdqa %xmm6, %xmm10 #291.9 | |
psrld $2, %xmm11 #291.9 | |
pslld $30, %xmm8 #291.9 | |
psrld $13, %xmm9 #291.9 | |
pslld $19, %xmm10 #291.9 | |
por %xmm8, %xmm11 #291.9 | |
por %xmm10, %xmm9 #291.9 | |
pxor %xmm9, %xmm11 #291.9 | |
movdqa %xmm6, %xmm14 #291.9 | |
movdqa %xmm6, %xmm9 #291.9 | |
movdqa %xmm6, %xmm8 #291.9 | |
psrld $22, %xmm14 #291.9 | |
pslld $10, %xmm9 #291.9 | |
pand %xmm1, %xmm8 #291.9 | |
por %xmm9, %xmm14 #291.9 | |
movdqa %xmm0, 2816(%rsp) #291.9 | |
pxor %xmm8, %xmm0 #291.9 | |
pxor %xmm14, %xmm11 #291.9 | |
pxor %xmm13, %xmm0 #291.9 | |
paddd %xmm0, %xmm11 #291.9 | |
movdqa %xmm3, %xmm13 #292.14 | |
movdqa %xmm3, %xmm0 #292.14 | |
movdqa %xmm3, %xmm8 #292.14 | |
movdqa %xmm3, %xmm9 #292.14 | |
psrld $17, %xmm13 #292.14 | |
pslld $15, %xmm0 #292.14 | |
psrld $19, %xmm8 #292.14 | |
pslld $13, %xmm9 #292.14 | |
por %xmm0, %xmm13 #292.14 | |
por %xmm9, %xmm8 #292.14 | |
paddd %xmm7, %xmm5 #291.9 | |
movdqa %xmm3, 2736(%rsp) #288.14 | |
pxor %xmm8, %xmm13 #292.14 | |
psrld $10, %xmm3 #292.14 | |
paddd %xmm11, %xmm7 #291.9 | |
pxor %xmm3, %xmm13 #292.14 | |
movdqa %xmm5, %xmm11 #293.9 | |
movdqa 2528(%rsp), %xmm3 #292.14 | |
movdqa %xmm5, %xmm9 #293.9 | |
paddd %xmm13, %xmm3 #292.14 | |
psrld $6, %xmm11 #293.9 | |
movdqa 2160(%rsp), %xmm13 #292.14 | |
pslld $21, %xmm9 #293.9 | |
movdqa %xmm13, %xmm10 #292.14 | |
movdqa %xmm13, %xmm14 #292.14 | |
movdqa %xmm13, %xmm0 #292.14 | |
movdqa %xmm13, %xmm8 #292.14 | |
psrld $7, %xmm10 #292.14 | |
pslld $25, %xmm14 #292.14 | |
psrld $18, %xmm0 #292.14 | |
pslld $14, %xmm8 #292.14 | |
por %xmm14, %xmm10 #292.14 | |
por %xmm8, %xmm0 #292.14 | |
pxor %xmm0, %xmm10 #292.14 | |
psrld $3, %xmm13 #292.14 | |
pxor %xmm13, %xmm10 #292.14 | |
movdqa %xmm5, %xmm0 #293.9 | |
movdqa %xmm5, %xmm8 #293.9 | |
paddd %xmm10, %xmm15 #292.14 | |
pslld $26, %xmm0 #293.9 | |
psrld $11, %xmm8 #293.9 | |
movdqa %xmm5, %xmm10 #293.9 | |
movdqa %xmm5, %xmm13 #293.9 | |
por %xmm0, %xmm11 #293.9 | |
por %xmm9, %xmm8 #293.9 | |
psrld $25, %xmm10 #293.9 | |
pslld $7, %xmm13 #293.9 | |
movdqa %xmm5, %xmm0 #293.9 | |
pxor %xmm8, %xmm11 #293.9 | |
movdqa %xmm5, 2800(%rsp) #291.9 | |
por %xmm13, %xmm10 #293.9 | |
pand %xmm4, %xmm0 #293.9 | |
pandn %xmm12, %xmm5 #293.9 | |
movdqa %xmm4, 2752(%rsp) #289.9 | |
pxor %xmm10, %xmm11 #293.9 | |
movdqa .L_2il0floatpacket.6798(%rip), %xmm4 #293.9 | |
pxor %xmm5, %xmm0 #293.9 | |
paddd %xmm11, %xmm2 #293.9 | |
paddd %xmm0, %xmm4 #293.9 | |
paddd %xmm15, %xmm3 #292.14 | |
paddd %xmm4, %xmm2 #293.9 | |
movdqa %xmm7, %xmm0 #293.9 | |
paddd %xmm3, %xmm2 #293.9 | |
movdqa %xmm6, 2768(%rsp) #289.9 | |
pand %xmm6, %xmm0 #293.9 | |
movdqa %xmm3, 2848(%rsp) #292.14 | |
movdqa %xmm7, %xmm3 #293.9 | |
movdqa %xmm7, %xmm5 #293.9 | |
movdqa %xmm7, %xmm6 #293.9 | |
movdqa %xmm7, %xmm8 #293.9 | |
psrld $2, %xmm3 #293.9 | |
pslld $30, %xmm5 #293.9 | |
psrld $13, %xmm6 #293.9 | |
pslld $19, %xmm8 #293.9 | |
movdqa %xmm7, %xmm9 #293.9 | |
movdqa %xmm7, 2832(%rsp) #291.9 | |
por %xmm5, %xmm3 #293.9 | |
por %xmm8, %xmm6 #293.9 | |
psrld $22, %xmm9 #293.9 | |
pslld $10, %xmm7 #293.9 | |
paddd %xmm2, %xmm1 #293.9 | |
pxor %xmm6, %xmm3 #293.9 | |
por %xmm7, %xmm9 #293.9 | |
movdqa %xmm12, 2704(%rsp) #287.9 | |
movdqa %xmm2, 2864(%rsp) #293.9 | |
movdqa %xmm1, 2880(%rsp) #293.9 | |
movdqa %xmm0, 2896(%rsp) #293.9 | |
movdqa %xmm3, 2912(%rsp) #293.9 | |
movdqa %xmm9, 2928(%rsp) #293.9 | |
# LOE | |
..B2.16: # Preds ..B2.17 | |
movdqa 2832(%rsp), %xmm2 #293.9 | |
movdqa 2720(%rsp), %xmm6 #293.9 | |
movdqa %xmm2, %xmm12 #293.9 | |
movdqa 2896(%rsp), %xmm3 #293.9 | |
pand %xmm6, %xmm12 #293.9 | |
movdqa %xmm3, %xmm7 #293.9 | |
movdqa 2912(%rsp), %xmm1 #293.9 | |
pxor %xmm12, %xmm7 #293.9 | |
movdqa 2816(%rsp), %xmm4 #293.9 | |
movdqa 2784(%rsp), %xmm11 #294.14 | |
pxor %xmm7, %xmm4 #293.9 | |
pxor 2928(%rsp), %xmm1 #293.9 | |
movdqa %xmm11, %xmm5 #294.14 | |
movdqa %xmm11, %xmm15 #294.14 | |
movdqa %xmm11, %xmm13 #294.14 | |
movdqa %xmm11, %xmm0 #294.14 | |
paddd %xmm4, %xmm1 #293.9 | |
movdqa 2864(%rsp), %xmm12 #293.9 | |
psrld $17, %xmm5 #294.14 | |
movdqa 2224(%rsp), %xmm9 #294.14 | |
pslld $15, %xmm15 #294.14 | |
psrld $19, %xmm13 #294.14 | |
pslld $13, %xmm0 #294.14 | |
paddd %xmm1, %xmm12 #293.9 | |
por %xmm15, %xmm5 #294.14 | |
por %xmm0, %xmm13 #294.14 | |
movdqa %xmm9, %xmm14 #294.14 | |
movdqa %xmm9, %xmm10 #294.14 | |
movdqa %xmm9, %xmm7 #294.14 | |
movdqa %xmm9, %xmm1 #294.14 | |
pxor %xmm13, %xmm5 #294.14 | |
psrld $10, %xmm11 #294.14 | |
psrld $7, %xmm14 #294.14 | |
pslld $25, %xmm10 #294.14 | |
psrld $18, %xmm7 #294.14 | |
pslld $14, %xmm1 #294.14 | |
pxor %xmm11, %xmm5 #294.14 | |
movdqa 2880(%rsp), %xmm11 #295.9 | |
por %xmm10, %xmm14 #294.14 | |
por %xmm1, %xmm7 #294.14 | |
movdqa %xmm9, %xmm4 #294.14 | |
pxor %xmm7, %xmm14 #294.14 | |
psrld $3, %xmm4 #294.14 | |
movdqa %xmm11, %xmm15 #295.9 | |
movdqa %xmm11, %xmm13 #295.9 | |
movdqa %xmm11, %xmm0 #295.9 | |
movdqa %xmm11, %xmm7 #295.9 | |
movdqa 2640(%rsp), %xmm8 #294.14 | |
pxor %xmm4, %xmm14 #294.14 | |
psrld $6, %xmm15 #295.9 | |
pslld $26, %xmm13 #295.9 | |
psrld $11, %xmm0 #295.9 | |
pslld $21, %xmm7 #295.9 | |
movdqa %xmm11, %xmm1 #295.9 | |
movdqa %xmm11, %xmm4 #295.9 | |
paddd %xmm5, %xmm8 #294.14 | |
por %xmm13, %xmm15 #295.9 | |
movdqa 2160(%rsp), %xmm5 #294.14 | |
por %xmm7, %xmm0 #295.9 | |
psrld $25, %xmm1 #295.9 | |
pslld $7, %xmm4 #295.9 | |
paddd %xmm14, %xmm5 #294.14 | |
pxor %xmm0, %xmm15 #295.9 | |
por %xmm4, %xmm1 #295.9 | |
paddd %xmm5, %xmm8 #294.14 | |
movdqa 2752(%rsp), %xmm5 #295.9 | |
pxor %xmm1, %xmm15 #295.9 | |
movdqa 2800(%rsp), %xmm1 #295.9 | |
movdqa %xmm11, %xmm13 #295.9 | |
movdqa %xmm11, %xmm14 #295.9 | |
pand %xmm1, %xmm13 #295.9 | |
pandn %xmm5, %xmm14 #295.9 | |
movdqa %xmm12, %xmm0 #295.9 | |
movdqa 2704(%rsp), %xmm4 #295.9 | |
pxor %xmm14, %xmm13 #295.9 | |
movdqa .L_2il0floatpacket.6799(%rip), %xmm10 #295.9 | |
movdqa %xmm12, %xmm7 #295.9 | |
paddd %xmm15, %xmm4 #295.9 | |
paddd %xmm13, %xmm10 #295.9 | |
psrld $2, %xmm0 #295.9 | |
pslld $30, %xmm7 #295.9 | |
movdqa %xmm12, %xmm15 #295.9 | |
movdqa %xmm12, %xmm13 #295.9 | |
paddd %xmm10, %xmm4 #295.9 | |
por %xmm7, %xmm0 #295.9 | |
psrld $13, %xmm15 #295.9 | |
pslld $19, %xmm13 #295.9 | |
movdqa %xmm12, %xmm7 #295.9 | |
movdqa %xmm12, %xmm10 #295.9 | |
por %xmm13, %xmm15 #295.9 | |
psrld $22, %xmm7 #295.9 | |
pslld $10, %xmm10 #295.9 | |
pxor %xmm15, %xmm0 #295.9 | |
por %xmm10, %xmm7 #295.9 | |
movdqa %xmm2, %xmm14 #295.9 | |
pxor %xmm7, %xmm0 #295.9 | |
pand %xmm12, %xmm14 #295.9 | |
movdqa 2768(%rsp), %xmm7 #295.9 | |
movdqa %xmm14, %xmm13 #295.9 | |
movdqa %xmm7, %xmm15 #295.9 | |
paddd %xmm8, %xmm4 #295.9 | |
pand %xmm12, %xmm15 #295.9 | |
paddd %xmm4, %xmm6 #295.9 | |
pxor %xmm15, %xmm13 #295.9 | |
pxor %xmm13, %xmm3 #295.9 | |
movdqa 2848(%rsp), %xmm15 #296.14 | |
paddd %xmm3, %xmm0 #295.9 | |
paddd %xmm0, %xmm4 #295.9 | |
movdqa %xmm15, %xmm3 #296.14 | |
movdqa %xmm15, %xmm10 #296.14 | |
movdqa %xmm15, %xmm13 #296.14 | |
movdqa %xmm15, %xmm0 #296.14 | |
psrld $17, %xmm3 #296.14 | |
pslld $15, %xmm10 #296.14 | |
psrld $19, %xmm13 #296.14 | |
pslld $13, %xmm0 #296.14 | |
por %xmm10, %xmm3 #296.14 | |
por %xmm0, %xmm13 #296.14 | |
psrld $10, %xmm15 #296.14 | |
pxor %xmm13, %xmm3 #296.14 | |
pxor %xmm15, %xmm3 #296.14 | |
movdqa 2320(%rsp), %xmm15 #296.14 | |
movdqa %xmm15, %xmm13 #296.14 | |
movdqa %xmm15, %xmm0 #296.14 | |
movdqa 2656(%rsp), %xmm10 #296.14 | |
psrld $7, %xmm13 #296.14 | |
pslld $25, %xmm0 #296.14 | |
paddd %xmm3, %xmm10 #296.14 | |
por %xmm0, %xmm13 #296.14 | |
movdqa %xmm15, %xmm0 #296.14 | |
movdqa %xmm15, %xmm3 #296.14 | |
psrld $18, %xmm0 #296.14 | |
pslld $14, %xmm3 #296.14 | |
por %xmm3, %xmm0 #296.14 | |
movdqa %xmm15, %xmm3 #296.14 | |
pxor %xmm0, %xmm13 #296.14 | |
psrld $3, %xmm3 #296.14 | |
pxor %xmm3, %xmm13 #296.14 | |
movdqa %xmm6, %xmm0 #297.9 | |
paddd %xmm13, %xmm9 #296.14 | |
movdqa %xmm6, %xmm3 #297.9 | |
paddd %xmm9, %xmm10 #296.14 | |
movdqa %xmm6, %xmm9 #297.9 | |
movdqa %xmm6, %xmm13 #297.9 | |
psrld $6, %xmm9 #297.9 | |
pslld $26, %xmm0 #297.9 | |
psrld $11, %xmm3 #297.9 | |
pslld $21, %xmm13 #297.9 | |
por %xmm0, %xmm9 #297.9 | |
por %xmm13, %xmm3 #297.9 | |
movdqa %xmm6, %xmm0 #297.9 | |
pxor %xmm3, %xmm9 #297.9 | |
movdqa %xmm6, %xmm3 #297.9 | |
psrld $25, %xmm0 #297.9 | |
pslld $7, %xmm3 #297.9 | |
por %xmm3, %xmm0 #297.9 | |
movdqa %xmm11, %xmm3 #297.9 | |
movdqa %xmm6, %xmm13 #297.9 | |
pand %xmm6, %xmm3 #297.9 | |
pandn %xmm1, %xmm13 #297.9 | |
pxor %xmm0, %xmm9 #297.9 | |
movdqa .L_2il0floatpacket.6800(%rip), %xmm0 #297.9 | |
pxor %xmm13, %xmm3 #297.9 | |
paddd %xmm3, %xmm0 #297.9 | |
movdqa %xmm4, %xmm13 #297.9 | |
movdqa %xmm4, %xmm3 #297.9 | |
paddd %xmm9, %xmm5 #297.9 | |
psrld $2, %xmm13 #297.9 | |
pslld $30, %xmm3 #297.9 | |
paddd %xmm0, %xmm5 #297.9 | |
por %xmm3, %xmm13 #297.9 | |
movdqa %xmm4, %xmm3 #297.9 | |
movdqa %xmm4, %xmm0 #297.9 | |
psrld $13, %xmm3 #297.9 | |
pslld $19, %xmm0 #297.9 | |
por %xmm0, %xmm3 #297.9 | |
movdqa %xmm4, %xmm0 #297.9 | |
pxor %xmm3, %xmm13 #297.9 | |
movdqa %xmm4, %xmm3 #297.9 | |
movdqa %xmm4, %xmm9 #297.9 | |
psrld $22, %xmm0 #297.9 | |
pslld $10, %xmm3 #297.9 | |
pand %xmm12, %xmm9 #297.9 | |
por %xmm3, %xmm0 #297.9 | |
movdqa %xmm2, %xmm3 #297.9 | |
pxor %xmm0, %xmm13 #297.9 | |
pand %xmm4, %xmm3 #297.9 | |
movdqa %xmm9, %xmm0 #297.9 | |
paddd %xmm10, %xmm5 #297.9 | |
pxor %xmm3, %xmm0 #297.9 | |
paddd %xmm5, %xmm7 #297.9 | |
pxor %xmm14, %xmm0 #297.9 | |
movdqa %xmm8, %xmm14 #298.14 | |
paddd %xmm0, %xmm13 #297.9 | |
movdqa %xmm8, %xmm3 #298.14 | |
paddd %xmm13, %xmm5 #297.9 | |
movdqa %xmm8, %xmm13 #298.14 | |
movdqa %xmm8, %xmm0 #298.14 | |
psrld $17, %xmm13 #298.14 | |
pslld $15, %xmm14 #298.14 | |
psrld $19, %xmm3 #298.14 | |
pslld $13, %xmm0 #298.14 | |
por %xmm14, %xmm13 #298.14 | |
por %xmm0, %xmm3 #298.14 | |
movdqa %xmm8, 2944(%rsp) #294.14 | |
pxor %xmm3, %xmm13 #298.14 | |
psrld $10, %xmm8 #298.14 | |
pxor %xmm8, %xmm13 #298.14 | |
movdqa 2336(%rsp), %xmm8 #298.14 | |
movdqa %xmm8, %xmm14 #298.14 | |
movdqa %xmm8, %xmm0 #298.14 | |
movdqa 2672(%rsp), %xmm3 #298.14 | |
psrld $7, %xmm14 #298.14 | |
pslld $25, %xmm0 #298.14 | |
paddd %xmm13, %xmm3 #298.14 | |
por %xmm0, %xmm14 #298.14 | |
movdqa %xmm8, %xmm0 #298.14 | |
movdqa %xmm8, %xmm13 #298.14 | |
psrld $18, %xmm0 #298.14 | |
pslld $14, %xmm13 #298.14 | |
por %xmm13, %xmm0 #298.14 | |
movdqa %xmm8, %xmm13 #298.14 | |
pxor %xmm0, %xmm14 #298.14 | |
psrld $3, %xmm13 #298.14 | |
pxor %xmm13, %xmm14 #298.14 | |
movdqa %xmm7, %xmm0 #299.9 | |
paddd %xmm14, %xmm15 #298.14 | |
movdqa %xmm7, %xmm13 #299.9 | |
paddd %xmm15, %xmm3 #298.14 | |
movdqa %xmm7, %xmm14 #299.9 | |
movdqa %xmm7, %xmm15 #299.9 | |
psrld $6, %xmm0 #299.9 | |
pslld $26, %xmm13 #299.9 | |
psrld $11, %xmm14 #299.9 | |
pslld $21, %xmm15 #299.9 | |
por %xmm13, %xmm0 #299.9 | |
por %xmm15, %xmm14 #299.9 | |
movdqa %xmm7, %xmm13 #299.9 | |
movdqa %xmm7, %xmm15 #299.9 | |
psrld $25, %xmm13 #299.9 | |
pslld $7, %xmm15 #299.9 | |
pxor %xmm14, %xmm0 #299.9 | |
por %xmm15, %xmm13 #299.9 | |
movdqa %xmm7, %xmm15 #299.9 | |
pxor %xmm13, %xmm0 #299.9 | |
movdqa %xmm7, %xmm13 #299.9 | |
pand %xmm6, %xmm15 #299.9 | |
pandn %xmm11, %xmm13 #299.9 | |
movdqa .L_2il0floatpacket.6801(%rip), %xmm14 #299.9 | |
pxor %xmm13, %xmm15 #299.9 | |
paddd %xmm0, %xmm1 #299.9 | |
paddd %xmm15, %xmm14 #299.9 | |
paddd %xmm14, %xmm1 #299.9 | |
movdqa %xmm5, %xmm14 #299.9 | |
movdqa %xmm5, %xmm13 #299.9 | |
psrld $2, %xmm14 #299.9 | |
pslld $30, %xmm13 #299.9 | |
movdqa %xmm5, %xmm15 #299.9 | |
por %xmm13, %xmm14 #299.9 | |
movdqa %xmm5, %xmm13 #299.9 | |
psrld $13, %xmm15 #299.9 | |
pslld $19, %xmm13 #299.9 | |
por %xmm13, %xmm15 #299.9 | |
movdqa %xmm5, %xmm13 #299.9 | |
pxor %xmm15, %xmm14 #299.9 | |
movdqa %xmm5, %xmm15 #299.9 | |
movdqa %xmm5, %xmm0 #299.9 | |
psrld $22, %xmm13 #299.9 | |
pslld $10, %xmm15 #299.9 | |
pand %xmm4, %xmm0 #299.9 | |
por %xmm15, %xmm13 #299.9 | |
movdqa %xmm5, %xmm15 #299.9 | |
pxor %xmm13, %xmm14 #299.9 | |
pand %xmm12, %xmm15 #299.9 | |
movdqa %xmm0, %xmm13 #299.9 | |
paddd %xmm3, %xmm1 #299.9 | |
pxor %xmm15, %xmm13 #299.9 | |
paddd %xmm1, %xmm2 #299.9 | |
pxor %xmm9, %xmm13 #299.9 | |
movdqa %xmm10, %xmm9 #300.14 | |
paddd %xmm13, %xmm14 #299.9 | |
movdqa %xmm10, %xmm13 #300.14 | |
paddd %xmm14, %xmm1 #299.9 | |
movdqa %xmm10, %xmm15 #300.14 | |
movdqa %xmm10, %xmm14 #300.14 | |
psrld $17, %xmm13 #300.14 | |
pslld $15, %xmm9 #300.14 | |
psrld $19, %xmm15 #300.14 | |
pslld $13, %xmm14 #300.14 | |
por %xmm9, %xmm13 #300.14 | |
por %xmm14, %xmm15 #300.14 | |
movdqa %xmm10, 2960(%rsp) #296.14 | |
pxor %xmm15, %xmm13 #300.14 | |
psrld $10, %xmm10 #300.14 | |
movdqa 2688(%rsp), %xmm15 #300.14 | |
pxor %xmm10, %xmm13 #300.14 | |
paddd %xmm13, %xmm15 #300.14 | |
movdqa 2352(%rsp), %xmm13 #300.14 | |
movdqa %xmm13, %xmm14 #300.14 | |
movdqa %xmm13, %xmm9 #300.14 | |
psrld $7, %xmm14 #300.14 | |
pslld $25, %xmm9 #300.14 | |
por %xmm9, %xmm14 #300.14 | |
movdqa %xmm13, %xmm9 #300.14 | |
movdqa %xmm13, %xmm10 #300.14 | |
psrld $18, %xmm9 #300.14 | |
pslld $14, %xmm10 #300.14 | |
por %xmm10, %xmm9 #300.14 | |
movdqa %xmm13, %xmm10 #300.14 | |
pxor %xmm9, %xmm14 #300.14 | |
psrld $3, %xmm10 #300.14 | |
pxor %xmm10, %xmm14 #300.14 | |
movdqa %xmm2, %xmm10 #301.9 | |
paddd %xmm14, %xmm8 #300.14 | |
movdqa %xmm2, %xmm14 #301.9 | |
paddd %xmm8, %xmm15 #300.14 | |
movdqa %xmm2, %xmm9 #301.9 | |
movdqa %xmm2, %xmm8 #301.9 | |
psrld $6, %xmm14 #301.9 | |
pslld $26, %xmm10 #301.9 | |
psrld $11, %xmm9 #301.9 | |
pslld $21, %xmm8 #301.9 | |
por %xmm10, %xmm14 #301.9 | |
por %xmm8, %xmm9 #301.9 | |
movdqa %xmm2, %xmm10 #301.9 | |
movdqa %xmm2, %xmm8 #301.9 | |
psrld $25, %xmm10 #301.9 | |
pslld $7, %xmm8 #301.9 | |
pxor %xmm9, %xmm14 #301.9 | |
por %xmm8, %xmm10 #301.9 | |
movdqa %xmm2, %xmm8 #301.9 | |
pxor %xmm10, %xmm14 #301.9 | |
movdqa %xmm2, %xmm10 #301.9 | |
pand %xmm7, %xmm8 #301.9 | |
pandn %xmm6, %xmm10 #301.9 | |
movdqa .L_2il0floatpacket.6802(%rip), %xmm9 #301.9 | |
pxor %xmm10, %xmm8 #301.9 | |
paddd %xmm8, %xmm9 #301.9 | |
movdqa %xmm1, %xmm8 #301.9 | |
movdqa %xmm1, %xmm10 #301.9 | |
paddd %xmm14, %xmm11 #301.9 | |
psrld $2, %xmm8 #301.9 | |
pslld $30, %xmm10 #301.9 | |
paddd %xmm9, %xmm11 #301.9 | |
por %xmm10, %xmm8 #301.9 | |
movdqa %xmm1, %xmm10 #301.9 | |
movdqa %xmm1, %xmm9 #301.9 | |
psrld $13, %xmm10 #301.9 | |
pslld $19, %xmm9 #301.9 | |
por %xmm9, %xmm10 #301.9 | |
movdqa %xmm1, %xmm9 #301.9 | |
pxor %xmm10, %xmm8 #301.9 | |
movdqa %xmm1, %xmm10 #301.9 | |
movdqa %xmm1, %xmm14 #301.9 | |
psrld $22, %xmm9 #301.9 | |
pslld $10, %xmm10 #301.9 | |
pand %xmm5, %xmm14 #301.9 | |
por %xmm10, %xmm9 #301.9 | |
movdqa %xmm1, %xmm10 #301.9 | |
pxor %xmm9, %xmm8 #301.9 | |
pand %xmm4, %xmm10 #301.9 | |
movdqa %xmm14, %xmm9 #301.9 | |
paddd %xmm15, %xmm11 #301.9 | |
pxor %xmm10, %xmm9 #301.9 | |
paddd %xmm11, %xmm12 #301.9 | |
pxor %xmm0, %xmm9 #301.9 | |
movdqa %xmm3, %xmm0 #302.15 | |
paddd %xmm9, %xmm8 #301.9 | |
movdqa %xmm3, %xmm10 #302.15 | |
paddd %xmm8, %xmm11 #301.9 | |
movdqa %xmm3, %xmm9 #302.15 | |
movdqa %xmm3, %xmm8 #302.15 | |
psrld $17, %xmm0 #302.15 | |
pslld $15, %xmm10 #302.15 | |
psrld $19, %xmm9 #302.15 | |
pslld $13, %xmm8 #302.15 | |
por %xmm10, %xmm0 #302.15 | |
por %xmm8, %xmm9 #302.15 | |
movdqa %xmm3, 2976(%rsp) #298.14 | |
pxor %xmm9, %xmm0 #302.15 | |
psrld $10, %xmm3 #302.15 | |
pxor %xmm3, %xmm0 #302.15 | |
movdqa 2736(%rsp), %xmm3 #302.15 | |
paddd %xmm0, %xmm3 #302.15 | |
movdqa 2368(%rsp), %xmm0 #302.15 | |
movdqa %xmm0, %xmm10 #302.15 | |
movdqa %xmm0, %xmm8 #302.15 | |
psrld $7, %xmm10 #302.15 | |
pslld $25, %xmm8 #302.15 | |
por %xmm8, %xmm10 #302.15 | |
movdqa %xmm0, %xmm8 #302.15 | |
movdqa %xmm0, %xmm9 #302.15 | |
psrld $18, %xmm8 #302.15 | |
pslld $14, %xmm9 #302.15 | |
por %xmm9, %xmm8 #302.15 | |
movdqa %xmm0, %xmm9 #302.15 | |
pxor %xmm8, %xmm10 #302.15 | |
psrld $3, %xmm9 #302.15 | |
pxor %xmm9, %xmm10 #302.15 | |
movdqa %xmm12, %xmm8 #303.9 | |
paddd %xmm10, %xmm13 #302.15 | |
movdqa %xmm12, %xmm10 #303.9 | |
paddd %xmm13, %xmm3 #302.15 | |
movdqa %xmm12, %xmm13 #303.9 | |
movdqa %xmm12, %xmm9 #303.9 | |
psrld $6, %xmm13 #303.9 | |
pslld $26, %xmm8 #303.9 | |
psrld $11, %xmm10 #303.9 | |
pslld $21, %xmm9 #303.9 | |
por %xmm8, %xmm13 #303.9 | |
por %xmm9, %xmm10 #303.9 | |
movdqa %xmm12, %xmm8 #303.9 | |
pxor %xmm10, %xmm13 #303.9 | |
movdqa %xmm12, %xmm10 #303.9 | |
psrld $25, %xmm8 #303.9 | |
pslld $7, %xmm10 #303.9 | |
por %xmm10, %xmm8 #303.9 | |
movdqa %xmm12, %xmm10 #303.9 | |
pxor %xmm8, %xmm13 #303.9 | |
movdqa %xmm12, %xmm8 #303.9 | |
pand %xmm2, %xmm8 #303.9 | |
pandn %xmm7, %xmm10 #303.9 | |
movdqa .L_2il0floatpacket.6803(%rip), %xmm9 #303.9 | |
pxor %xmm10, %xmm8 #303.9 | |
paddd %xmm8, %xmm9 #303.9 | |
movdqa %xmm11, %xmm10 #303.9 | |
movdqa %xmm11, %xmm8 #303.9 | |
paddd %xmm13, %xmm6 #303.9 | |
psrld $2, %xmm10 #303.9 | |
pslld $30, %xmm8 #303.9 | |
paddd %xmm9, %xmm6 #303.9 | |
por %xmm8, %xmm10 #303.9 | |
movdqa %xmm11, %xmm8 #303.9 | |
movdqa %xmm11, %xmm9 #303.9 | |
psrld $13, %xmm8 #303.9 | |
pslld $19, %xmm9 #303.9 | |
por %xmm9, %xmm8 #303.9 | |
movdqa %xmm11, %xmm9 #303.9 | |
pxor %xmm8, %xmm10 #303.9 | |
movdqa %xmm11, %xmm8 #303.9 | |
movdqa %xmm11, %xmm13 #303.9 | |
psrld $22, %xmm8 #303.9 | |
pslld $10, %xmm9 #303.9 | |
pand %xmm1, %xmm13 #303.9 | |
por %xmm9, %xmm8 #303.9 | |
movdqa %xmm11, %xmm9 #303.9 | |
pxor %xmm8, %xmm10 #303.9 | |
pand %xmm5, %xmm9 #303.9 | |
movdqa %xmm13, %xmm8 #303.9 | |
paddd %xmm3, %xmm6 #303.9 | |
pxor %xmm9, %xmm8 #303.9 | |
paddd %xmm6, %xmm4 #303.9 | |
pxor %xmm14, %xmm8 #303.9 | |
movdqa %xmm15, %xmm14 #304.15 | |
paddd %xmm8, %xmm10 #303.9 | |
movdqa %xmm15, %xmm8 #304.15 | |
paddd %xmm10, %xmm6 #303.9 | |
movdqa %xmm15, %xmm10 #304.15 | |
movdqa %xmm15, %xmm9 #304.15 | |
psrld $17, %xmm10 #304.15 | |
pslld $15, %xmm14 #304.15 | |
psrld $19, %xmm8 #304.15 | |
pslld $13, %xmm9 #304.15 | |
por %xmm14, %xmm10 #304.15 | |
por %xmm9, %xmm8 #304.15 | |
psrld $10, %xmm15 #304.15 | |
pxor %xmm8, %xmm10 #304.15 | |
pxor %xmm15, %xmm10 #304.15 | |
movdqa 2416(%rsp), %xmm15 #304.15 | |
movdqa %xmm15, %xmm14 #304.15 | |
movdqa %xmm15, %xmm9 #304.15 | |
movdqa 2784(%rsp), %xmm8 #304.15 | |
psrld $7, %xmm14 #304.15 | |
pslld $25, %xmm9 #304.15 | |
paddd %xmm10, %xmm8 #304.15 | |
por %xmm9, %xmm14 #304.15 | |
movdqa %xmm15, %xmm10 #304.15 | |
movdqa %xmm15, %xmm9 #304.15 | |
psrld $18, %xmm10 #304.15 | |
pslld $14, %xmm9 #304.15 | |
por %xmm9, %xmm10 #304.15 | |
movdqa %xmm15, %xmm9 #304.15 | |
pxor %xmm10, %xmm14 #304.15 | |
psrld $3, %xmm9 #304.15 | |
pxor %xmm9, %xmm14 #304.15 | |
movdqa %xmm4, %xmm9 #305.9 | |
paddd %xmm14, %xmm0 #304.15 | |
movdqa %xmm4, %xmm14 #305.9 | |
paddd %xmm0, %xmm8 #304.15 | |
movdqa %xmm4, %xmm10 #305.9 | |
movdqa %xmm4, %xmm0 #305.9 | |
psrld $6, %xmm14 #305.9 | |
pslld $26, %xmm9 #305.9 | |
psrld $11, %xmm10 #305.9 | |
pslld $21, %xmm0 #305.9 | |
por %xmm9, %xmm14 #305.9 | |
por %xmm0, %xmm10 #305.9 | |
movdqa %xmm4, %xmm9 #305.9 | |
movdqa %xmm4, %xmm0 #305.9 | |
psrld $25, %xmm9 #305.9 | |
pslld $7, %xmm0 #305.9 | |
pxor %xmm10, %xmm14 #305.9 | |
por %xmm0, %xmm9 #305.9 | |
movdqa %xmm4, %xmm0 #305.9 | |
pxor %xmm9, %xmm14 #305.9 | |
movdqa %xmm4, %xmm9 #305.9 | |
pand %xmm12, %xmm0 #305.9 | |
pandn %xmm2, %xmm9 #305.9 | |
movdqa .L_2il0floatpacket.6804(%rip), %xmm10 #305.9 | |
pxor %xmm9, %xmm0 #305.9 | |
paddd %xmm14, %xmm7 #305.9 | |
paddd %xmm0, %xmm10 #305.9 | |
paddd %xmm10, %xmm7 #305.9 | |
movdqa %xmm6, %xmm0 #305.9 | |
movdqa %xmm11, 3008(%rsp) #301.9 | |
paddd %xmm8, %xmm7 #305.9 | |
movdqa %xmm8, 3056(%rsp) #304.15 | |
pand %xmm11, %xmm0 #305.9 | |
movdqa %xmm6, %xmm11 #305.9 | |
movdqa %xmm6, %xmm8 #305.9 | |
movdqa %xmm6, %xmm9 #305.9 | |
movdqa %xmm6, %xmm10 #305.9 | |
psrld $2, %xmm11 #305.9 | |
pslld $30, %xmm8 #305.9 | |
psrld $13, %xmm9 #305.9 | |
pslld $19, %xmm10 #305.9 | |
por %xmm8, %xmm11 #305.9 | |
por %xmm10, %xmm9 #305.9 | |
pxor %xmm9, %xmm11 #305.9 | |
movdqa %xmm6, %xmm14 #305.9 | |
movdqa %xmm6, %xmm9 #305.9 | |
movdqa %xmm6, %xmm8 #305.9 | |
psrld $22, %xmm14 #305.9 | |
pslld $10, %xmm9 #305.9 | |
pand %xmm1, %xmm8 #305.9 | |
por %xmm9, %xmm14 #305.9 | |
movdqa %xmm0, 3088(%rsp) #305.9 | |
pxor %xmm8, %xmm0 #305.9 | |
pxor %xmm14, %xmm11 #305.9 | |
pxor %xmm13, %xmm0 #305.9 | |
paddd %xmm0, %xmm11 #305.9 | |
movdqa %xmm3, %xmm13 #306.15 | |
movdqa %xmm3, %xmm0 #306.15 | |
movdqa %xmm3, %xmm8 #306.15 | |
movdqa %xmm3, %xmm9 #306.15 | |
psrld $17, %xmm13 #306.15 | |
pslld $15, %xmm0 #306.15 | |
psrld $19, %xmm8 #306.15 | |
pslld $13, %xmm9 #306.15 | |
por %xmm0, %xmm13 #306.15 | |
por %xmm9, %xmm8 #306.15 | |
psrld $10, %xmm3 #306.15 | |
pxor %xmm8, %xmm13 #306.15 | |
paddd %xmm7, %xmm5 #305.9 | |
pxor %xmm3, %xmm13 #306.15 | |
paddd %xmm11, %xmm7 #305.9 | |
movdqa 2848(%rsp), %xmm3 #306.15 | |
movdqa %xmm5, %xmm11 #307.9 | |
paddd %xmm13, %xmm3 #306.15 | |
movdqa %xmm5, %xmm9 #307.9 | |
movdqa 2464(%rsp), %xmm13 #306.15 | |
psrld $6, %xmm11 #307.9 | |
movdqa %xmm13, %xmm10 #306.15 | |
movdqa %xmm13, %xmm14 #306.15 | |
movdqa %xmm13, %xmm0 #306.15 | |
movdqa %xmm13, %xmm8 #306.15 | |
psrld $7, %xmm10 #306.15 | |
pslld $25, %xmm14 #306.15 | |
psrld $18, %xmm0 #306.15 | |
pslld $14, %xmm8 #306.15 | |
por %xmm14, %xmm10 #306.15 | |
por %xmm8, %xmm0 #306.15 | |
pxor %xmm0, %xmm10 #306.15 | |
psrld $3, %xmm13 #306.15 | |
pxor %xmm13, %xmm10 #306.15 | |
movdqa %xmm5, %xmm0 #307.9 | |
movdqa %xmm5, %xmm8 #307.9 | |
paddd %xmm10, %xmm15 #306.15 | |
pslld $26, %xmm0 #307.9 | |
psrld $11, %xmm8 #307.9 | |
pslld $21, %xmm9 #307.9 | |
movdqa %xmm5, %xmm10 #307.9 | |
movdqa %xmm5, %xmm13 #307.9 | |
por %xmm0, %xmm11 #307.9 | |
por %xmm9, %xmm8 #307.9 | |
psrld $25, %xmm10 #307.9 | |
pslld $7, %xmm13 #307.9 | |
movdqa %xmm5, %xmm0 #307.9 | |
movdqa %xmm5, 3072(%rsp) #305.9 | |
pxor %xmm8, %xmm11 #307.9 | |
por %xmm13, %xmm10 #307.9 | |
pand %xmm4, %xmm0 #307.9 | |
pandn %xmm12, %xmm5 #307.9 | |
pxor %xmm10, %xmm11 #307.9 | |
movdqa %xmm4, 3024(%rsp) #303.9 | |
pxor %xmm5, %xmm0 #307.9 | |
movdqa .L_2il0floatpacket.6805(%rip), %xmm4 #307.9 | |
paddd %xmm11, %xmm2 #307.9 | |
paddd %xmm0, %xmm4 #307.9 | |
paddd %xmm15, %xmm3 #306.15 | |
paddd %xmm4, %xmm2 #307.9 | |
movdqa %xmm7, %xmm0 #307.9 | |
movdqa %xmm6, 3040(%rsp) #303.9 | |
paddd %xmm3, %xmm2 #307.9 | |
movdqa %xmm3, 3120(%rsp) #306.15 | |
pand %xmm6, %xmm0 #307.9 | |
movdqa %xmm7, %xmm3 #307.9 | |
movdqa %xmm7, %xmm5 #307.9 | |
movdqa %xmm7, %xmm6 #307.9 | |
movdqa %xmm7, %xmm8 #307.9 | |
psrld $2, %xmm3 #307.9 | |
pslld $30, %xmm5 #307.9 | |
psrld $13, %xmm6 #307.9 | |
pslld $19, %xmm8 #307.9 | |
movdqa %xmm7, %xmm9 #307.9 | |
por %xmm5, %xmm3 #307.9 | |
movdqa %xmm7, 3104(%rsp) #305.9 | |
por %xmm8, %xmm6 #307.9 | |
psrld $22, %xmm9 #307.9 | |
pslld $10, %xmm7 #307.9 | |
pxor %xmm6, %xmm3 #307.9 | |
por %xmm7, %xmm9 #307.9 | |
paddd %xmm2, %xmm1 #307.9 | |
pxor %xmm9, %xmm3 #307.9 | |
movdqa %xmm12, 2992(%rsp) #301.9 | |
movdqa %xmm2, 3136(%rsp) #307.9 | |
movdqa %xmm1, 3152(%rsp) #307.9 | |
movdqa %xmm0, 3168(%rsp) #307.9 | |
movdqa %xmm3, 3184(%rsp) #307.9 | |
# LOE | |
..B2.15: # Preds ..B2.16 | |
movdqa 3104(%rsp), %xmm4 #307.9 | |
movdqa 3008(%rsp), %xmm1 #307.9 | |
movdqa %xmm4, %xmm7 #307.9 | |
movdqa 3168(%rsp), %xmm6 #307.9 | |
pand %xmm1, %xmm7 #307.9 | |
movdqa %xmm6, %xmm12 #307.9 | |
movdqa 3088(%rsp), %xmm5 #307.9 | |
pxor %xmm7, %xmm12 #307.9 | |
movdqa 3184(%rsp), %xmm9 #307.9 | |
pxor %xmm12, %xmm5 #307.9 | |
movdqa 3056(%rsp), %xmm7 #308.15 | |
paddd %xmm5, %xmm9 #307.9 | |
movdqa 2528(%rsp), %xmm5 #308.15 | |
movdqa %xmm7, %xmm13 #308.15 | |
movdqa %xmm7, %xmm3 #308.15 | |
movdqa %xmm7, %xmm14 #308.15 | |
movdqa %xmm7, %xmm8 #308.15 | |
psrld $17, %xmm13 #308.15 | |
pslld $15, %xmm3 #308.15 | |
psrld $19, %xmm14 #308.15 | |
pslld $13, %xmm8 #308.15 | |
movdqa %xmm5, %xmm15 #308.15 | |
movdqa %xmm5, %xmm11 #308.15 | |
movdqa %xmm5, %xmm0 #308.15 | |
movdqa %xmm5, %xmm12 #308.15 | |
por %xmm3, %xmm13 #308.15 | |
movdqa 3136(%rsp), %xmm10 #307.9 | |
por %xmm8, %xmm14 #308.15 | |
psrld $7, %xmm15 #308.15 | |
pslld $25, %xmm11 #308.15 | |
psrld $18, %xmm0 #308.15 | |
pslld $14, %xmm12 #308.15 | |
paddd %xmm9, %xmm10 #307.9 | |
pxor %xmm14, %xmm13 #308.15 | |
psrld $10, %xmm7 #308.15 | |
por %xmm11, %xmm15 #308.15 | |
por %xmm12, %xmm0 #308.15 | |
movdqa %xmm5, %xmm9 #308.15 | |
movdqa 3152(%rsp), %xmm12 #309.9 | |
pxor %xmm7, %xmm13 #308.15 | |
movdqa 2944(%rsp), %xmm2 #308.15 | |
pxor %xmm0, %xmm15 #308.15 | |
psrld $3, %xmm9 #308.15 | |
movdqa %xmm12, %xmm7 #309.9 | |
movdqa %xmm12, %xmm3 #309.9 | |
movdqa %xmm12, %xmm14 #309.9 | |
movdqa %xmm12, %xmm8 #309.9 | |
paddd %xmm13, %xmm2 #308.15 | |
movdqa 2464(%rsp), %xmm13 #308.15 | |
pxor %xmm9, %xmm15 #308.15 | |
psrld $6, %xmm7 #309.9 | |
pslld $26, %xmm3 #309.9 | |
psrld $11, %xmm14 #309.9 | |
pslld $21, %xmm8 #309.9 | |
paddd %xmm15, %xmm13 #308.15 | |
por %xmm3, %xmm7 #309.9 | |
por %xmm8, %xmm14 #309.9 | |
movdqa %xmm12, %xmm11 #309.9 | |
movdqa %xmm12, %xmm9 #309.9 | |
paddd %xmm13, %xmm2 #308.15 | |
movdqa 3072(%rsp), %xmm13 #309.9 | |
pxor %xmm14, %xmm7 #309.9 | |
movdqa 3024(%rsp), %xmm3 #309.9 | |
psrld $25, %xmm11 #309.9 | |
pslld $7, %xmm9 #309.9 | |
movdqa %xmm12, %xmm14 #309.9 | |
movdqa %xmm12, %xmm8 #309.9 | |
por %xmm9, %xmm11 #309.9 | |
pand %xmm13, %xmm14 #309.9 | |
pandn %xmm3, %xmm8 #309.9 | |
movdqa 2992(%rsp), %xmm15 #309.9 | |
pxor %xmm11, %xmm7 #309.9 | |
movdqa .L_2il0floatpacket.6806(%rip), %xmm0 #309.9 | |
pxor %xmm8, %xmm14 #309.9 | |
paddd %xmm7, %xmm15 #309.9 | |
paddd %xmm14, %xmm0 #309.9 | |
paddd %xmm0, %xmm15 #309.9 | |
movdqa %xmm10, %xmm0 #309.9 | |
movdqa %xmm10, %xmm9 #309.9 | |
movdqa %xmm10, %xmm7 #309.9 | |
movdqa %xmm10, %xmm14 #309.9 | |
psrld $2, %xmm0 #309.9 | |
pslld $30, %xmm9 #309.9 | |
psrld $13, %xmm7 #309.9 | |
pslld $19, %xmm14 #309.9 | |
por %xmm9, %xmm0 #309.9 | |
por %xmm14, %xmm7 #309.9 | |
movdqa %xmm4, %xmm8 #309.9 | |
pxor %xmm7, %xmm0 #309.9 | |
movdqa %xmm10, %xmm11 #309.9 | |
movdqa 3040(%rsp), %xmm7 #309.9 | |
movdqa %xmm10, %xmm9 #309.9 | |
pand %xmm10, %xmm8 #309.9 | |
psrld $22, %xmm11 #309.9 | |
pslld $10, %xmm9 #309.9 | |
movdqa %xmm7, %xmm14 #309.9 | |
paddd %xmm2, %xmm15 #309.9 | |
por %xmm9, %xmm11 #309.9 | |
pand %xmm10, %xmm14 #309.9 | |
movdqa %xmm8, %xmm9 #309.9 | |
paddd %xmm15, %xmm1 #309.9 | |
pxor %xmm14, %xmm9 #309.9 | |
pxor %xmm11, %xmm0 #309.9 | |
pxor %xmm9, %xmm6 #309.9 | |
movdqa %xmm1, %xmm9 #311.9 | |
movdqa %xmm1, %xmm14 #311.9 | |
paddd %xmm6, %xmm0 #309.9 | |
psrld $6, %xmm9 #311.9 | |
pslld $26, %xmm14 #311.9 | |
paddd %xmm0, %xmm15 #309.9 | |
por %xmm14, %xmm9 #311.9 | |
movdqa %xmm1, %xmm11 #311.9 | |
movdqa %xmm1, %xmm6 #311.9 | |
movdqa %xmm1, %xmm14 #311.9 | |
movdqa %xmm1, %xmm0 #311.9 | |
psrld $11, %xmm11 #311.9 | |
pslld $21, %xmm6 #311.9 | |
psrld $25, %xmm14 #311.9 | |
pslld $7, %xmm0 #311.9 | |
por %xmm6, %xmm11 #311.9 | |
por %xmm0, %xmm14 #311.9 | |
movdqa %xmm12, %xmm0 #311.9 | |
movdqa %xmm1, %xmm6 #311.9 | |
pxor %xmm11, %xmm9 #311.9 | |
pand %xmm1, %xmm0 #311.9 | |
pandn %xmm13, %xmm6 #311.9 | |
pxor %xmm14, %xmm9 #311.9 | |
pxor %xmm6, %xmm0 #311.9 | |
movdqa 3120(%rsp), %xmm6 #310.15 | |
paddd %xmm9, %xmm3 #311.9 | |
movdqa %xmm6, %xmm9 #310.15 | |
movdqa %xmm6, %xmm14 #310.15 | |
movdqa .L_2il0floatpacket.6807(%rip), %xmm11 #311.9 | |
psrld $17, %xmm9 #310.15 | |
pslld $15, %xmm14 #310.15 | |
paddd %xmm0, %xmm11 #311.9 | |
por %xmm14, %xmm9 #310.15 | |
movdqa %xmm6, %xmm0 #310.15 | |
movdqa %xmm6, %xmm14 #310.15 | |
psrld $19, %xmm0 #310.15 | |
pslld $13, %xmm14 #310.15 | |
psrld $10, %xmm6 #310.15 | |
por %xmm14, %xmm0 #310.15 | |
paddd %xmm11, %xmm3 #311.9 | |
pxor %xmm0, %xmm9 #310.15 | |
movdqa 2960(%rsp), %xmm0 #310.15 | |
pxor %xmm6, %xmm9 #310.15 | |
paddd %xmm9, %xmm0 #310.15 | |
movdqa 2640(%rsp), %xmm9 #310.15 | |
movdqa %xmm9, %xmm14 #310.15 | |
movdqa %xmm9, %xmm11 #310.15 | |
psrld $7, %xmm14 #310.15 | |
pslld $25, %xmm11 #310.15 | |
por %xmm11, %xmm14 #310.15 | |
movdqa %xmm9, %xmm6 #310.15 | |
movdqa %xmm9, %xmm11 #310.15 | |
psrld $18, %xmm6 #310.15 | |
pslld $14, %xmm11 #310.15 | |
por %xmm11, %xmm6 #310.15 | |
movdqa %xmm15, %xmm11 #311.9 | |
pxor %xmm6, %xmm14 #310.15 | |
movdqa %xmm9, %xmm6 #310.15 | |
psrld $3, %xmm6 #310.15 | |
psrld $13, %xmm11 #311.9 | |
pxor %xmm6, %xmm14 #310.15 | |
movdqa %xmm15, %xmm6 #311.9 | |
paddd %xmm14, %xmm5 #310.15 | |
movdqa %xmm15, %xmm14 #311.9 | |
paddd %xmm5, %xmm0 #310.15 | |
movdqa %xmm15, %xmm5 #311.9 | |
paddd %xmm0, %xmm3 #311.9 | |
movdqa %xmm15, %xmm0 #311.9 | |
psrld $2, %xmm0 #311.9 | |
pslld $30, %xmm14 #311.9 | |
pslld $19, %xmm5 #311.9 | |
por %xmm14, %xmm0 #311.9 | |
por %xmm5, %xmm11 #311.9 | |
movdqa %xmm15, %xmm14 #311.9 | |
movdqa %xmm15, %xmm5 #311.9 | |
pand %xmm10, %xmm6 #311.9 | |
pxor %xmm11, %xmm0 #311.9 | |
psrld $22, %xmm14 #311.9 | |
pslld $10, %xmm5 #311.9 | |
movdqa %xmm4, %xmm11 #311.9 | |
por %xmm5, %xmm14 #311.9 | |
pand %xmm15, %xmm11 #311.9 | |
movdqa %xmm6, %xmm5 #311.9 | |
paddd %xmm3, %xmm7 #311.9 | |
pxor %xmm11, %xmm5 #311.9 | |
pxor %xmm14, %xmm0 #311.9 | |
pxor %xmm8, %xmm5 #311.9 | |
movdqa %xmm7, %xmm8 #313.9 | |
paddd %xmm5, %xmm0 #311.9 | |
movdqa %xmm7, %xmm5 #313.9 | |
psrld $6, %xmm8 #313.9 | |
pslld $26, %xmm5 #313.9 | |
movdqa %xmm7, %xmm14 #313.9 | |
movdqa %xmm7, %xmm11 #313.9 | |
paddd %xmm0, %xmm3 #311.9 | |
por %xmm5, %xmm8 #313.9 | |
psrld $11, %xmm14 #313.9 | |
pslld $21, %xmm11 #313.9 | |
movdqa %xmm7, %xmm0 #313.9 | |
movdqa %xmm7, %xmm5 #313.9 | |
por %xmm11, %xmm14 #313.9 | |
psrld $25, %xmm0 #313.9 | |
pslld $7, %xmm5 #313.9 | |
pxor %xmm14, %xmm8 #313.9 | |
por %xmm5, %xmm0 #313.9 | |
movdqa %xmm7, %xmm5 #313.9 | |
pxor %xmm0, %xmm8 #313.9 | |
movdqa %xmm7, %xmm0 #313.9 | |
pand %xmm1, %xmm0 #313.9 | |
pandn %xmm12, %xmm5 #313.9 | |
movdqa .L_2il0floatpacket.6808(%rip), %xmm14 #313.9 | |
pxor %xmm5, %xmm0 #313.9 | |
paddd %xmm8, %xmm13 #313.9 | |
paddd %xmm0, %xmm14 #313.9 | |
paddd %xmm14, %xmm13 #313.9 | |
movdqa %xmm2, %xmm11 #312.15 | |
movdqa %xmm2, %xmm5 #312.15 | |
movdqa %xmm2, %xmm14 #312.15 | |
movdqa %xmm2, %xmm8 #312.15 | |
psrld $17, %xmm11 #312.15 | |
pslld $15, %xmm5 #312.15 | |
psrld $19, %xmm14 #312.15 | |
pslld $13, %xmm8 #312.15 | |
por %xmm5, %xmm11 #312.15 | |
por %xmm8, %xmm14 #312.15 | |
psrld $10, %xmm2 #312.15 | |
pxor %xmm14, %xmm11 #312.15 | |
movdqa 2656(%rsp), %xmm14 #312.15 | |
pxor %xmm2, %xmm11 #312.15 | |
movdqa 2976(%rsp), %xmm2 #312.15 | |
movdqa %xmm14, %xmm5 #312.15 | |
paddd %xmm11, %xmm2 #312.15 | |
movdqa %xmm14, %xmm0 #312.15 | |
movdqa %xmm14, %xmm8 #312.15 | |
movdqa %xmm14, %xmm11 #312.15 | |
psrld $7, %xmm5 #312.15 | |
pslld $25, %xmm0 #312.15 | |
psrld $18, %xmm8 #312.15 | |
pslld $14, %xmm11 #312.15 | |
por %xmm0, %xmm5 #312.15 | |
por %xmm11, %xmm8 #312.15 | |
pxor %xmm8, %xmm5 #312.15 | |
psrld $3, %xmm14 #312.15 | |
pxor %xmm14, %xmm5 #312.15 | |
movdqa %xmm3, %xmm14 #313.9 | |
paddd %xmm5, %xmm9 #312.15 | |
movdqa %xmm3, %xmm5 #313.9 | |
movdqa %xmm3, %xmm8 #313.9 | |
movdqa %xmm3, %xmm11 #313.9 | |
psrld $2, %xmm5 #313.9 | |
pslld $30, %xmm14 #313.9 | |
psrld $13, %xmm8 #313.9 | |
pslld $19, %xmm11 #313.9 | |
por %xmm14, %xmm5 #313.9 | |
por %xmm11, %xmm8 #313.9 | |
paddd %xmm9, %xmm2 #312.15 | |
pxor %xmm8, %xmm5 #313.9 | |
movdqa %xmm3, %xmm0 #313.9 | |
movdqa %xmm3, %xmm9 #313.9 | |
movdqa %xmm3, %xmm14 #313.9 | |
movdqa %xmm3, %xmm8 #313.9 | |
psrld $22, %xmm0 #313.9 | |
pslld $10, %xmm9 #313.9 | |
pand %xmm15, %xmm14 #313.9 | |
pand %xmm10, %xmm8 #313.9 | |
por %xmm9, %xmm0 #313.9 | |
pxor %xmm8, %xmm14 #313.9 | |
paddd %xmm2, %xmm13 #313.9 | |
pxor %xmm0, %xmm5 #313.9 | |
pxor %xmm6, %xmm14 #313.9 | |
movdqa %xmm13, %xmm2 #313.9 | |
paddd %xmm14, %xmm5 #313.9 | |
paddd %xmm13, %xmm4 #313.9 | |
movdqa 304(%rsp), %xmm6 #319.9 | |
paddd %xmm5, %xmm2 #313.9 | |
paddd %xmm2, %xmm6 #319.9 | |
movdqa 352(%rsp), %xmm2 #322.9 | |
paddd %xmm10, %xmm2 #322.9 | |
movdqa 400(%rsp), %xmm10 #325.9 | |
paddd %xmm1, %xmm10 #325.9 | |
movdqa 416(%rsp), %xmm1 #326.9 | |
movdqa 320(%rsp), %xmm11 #320.9 | |
paddd %xmm12, %xmm1 #326.9 | |
movdqa 1088(%rsp), %xmm12 #346.9 | |
paddd %xmm3, %xmm11 #320.9 | |
movdqa 368(%rsp), %xmm3 #323.9 | |
paddd %xmm6, %xmm12 #346.9 | |
movdqa 1008(%rsp), %xmm5 #346.9 | |
paddd %xmm4, %xmm3 #323.9 | |
movdqa 384(%rsp), %xmm4 #324.9 | |
paddd %xmm12, %xmm5 #346.9 | |
movdqa %xmm6, 3200(%rsp) #319.9 | |
paddd %xmm7, %xmm4 #324.9 | |
movdqa %xmm5, %xmm6 #347.9 | |
movdqa %xmm5, %xmm9 #347.9 | |
movdqa %xmm5, %xmm13 #347.9 | |
movdqa %xmm5, %xmm7 #347.9 | |
psrld $6, %xmm6 #347.9 | |
pslld $26, %xmm9 #347.9 | |
psrld $11, %xmm13 #347.9 | |
pslld $21, %xmm7 #347.9 | |
movdqa %xmm5, %xmm14 #347.9 | |
movdqa %xmm5, %xmm8 #347.9 | |
por %xmm9, %xmm6 #347.9 | |
por %xmm7, %xmm13 #347.9 | |
psrld $25, %xmm14 #347.9 | |
pslld $7, %xmm8 #347.9 | |
pxor %xmm13, %xmm6 #347.9 | |
por %xmm8, %xmm14 #347.9 | |
movdqa %xmm10, 3296(%rsp) #325.9 | |
pxor %xmm14, %xmm6 #347.9 | |
movdqa 1056(%rsp), %xmm10 #347.9 | |
movdqa %xmm1, 3312(%rsp) #326.9 | |
paddd %xmm6, %xmm10 #347.9 | |
movdqa 336(%rsp), %xmm0 #321.9 | |
movdqa 1120(%rsp), %xmm1 #346.9 | |
paddd %xmm15, %xmm0 #321.9 | |
movdqa 1024(%rsp), %xmm6 #347.9 | |
paddd %xmm12, %xmm1 #346.9 | |
movdqa 1040(%rsp), %xmm9 #347.9 | |
movdqa %xmm6, %xmm15 #347.9 | |
movdqa %xmm5, %xmm12 #347.9 | |
pand %xmm5, %xmm15 #347.9 | |
pandn %xmm9, %xmm12 #347.9 | |
movdqa %xmm1, %xmm8 #347.9 | |
movdqa .L_2il0floatpacket.6746(%rip), %xmm13 #347.9 | |
pxor %xmm12, %xmm15 #347.9 | |
paddd %xmm15, %xmm13 #347.9 | |
movdqa %xmm1, %xmm7 #347.9 | |
paddd %xmm13, %xmm10 #347.9 | |
psrld $2, %xmm8 #347.9 | |
pslld $30, %xmm7 #347.9 | |
paddd %xmm11, %xmm10 #347.9 | |
movdqa %xmm11, 3216(%rsp) #320.9 | |
por %xmm7, %xmm8 #347.9 | |
movdqa %xmm1, %xmm11 #347.9 | |
movdqa %xmm1, %xmm7 #347.9 | |
psrld $13, %xmm11 #347.9 | |
pslld $19, %xmm7 #347.9 | |
por %xmm7, %xmm11 #347.9 | |
movdqa %xmm1, %xmm15 #347.9 | |
movdqa %xmm1, %xmm7 #347.9 | |
psrld $22, %xmm15 #347.9 | |
movdqa 960(%rsp), %xmm13 #347.9 | |
pslld $10, %xmm7 #347.9 | |
movdqa %xmm13, %xmm14 #347.9 | |
pxor %xmm11, %xmm8 #347.9 | |
por %xmm7, %xmm15 #347.9 | |
pand %xmm1, %xmm14 #347.9 | |
movdqa 976(%rsp), %xmm7 #347.9 | |
pxor %xmm15, %xmm8 #347.9 | |
movdqa %xmm7, %xmm15 #347.9 | |
movdqa %xmm14, %xmm11 #347.9 | |
pand %xmm1, %xmm15 #347.9 | |
pxor %xmm15, %xmm11 #347.9 | |
movdqa 1104(%rsp), %xmm15 #347.9 | |
movdqa 992(%rsp), %xmm12 #347.9 | |
pxor %xmm11, %xmm15 #347.9 | |
paddd %xmm10, %xmm12 #347.9 | |
paddd %xmm15, %xmm8 #347.9 | |
paddd %xmm8, %xmm10 #347.9 | |
movdqa %xmm12, %xmm8 #348.9 | |
movdqa %xmm12, %xmm11 #348.9 | |
psrld $6, %xmm8 #348.9 | |
pslld $26, %xmm11 #348.9 | |
movdqa %xmm12, %xmm15 #348.9 | |
por %xmm11, %xmm8 #348.9 | |
movdqa %xmm12, %xmm11 #348.9 | |
psrld $11, %xmm11 #348.9 | |
pslld $21, %xmm15 #348.9 | |
por %xmm15, %xmm11 #348.9 | |
movdqa %xmm12, %xmm15 #348.9 | |
pxor %xmm11, %xmm8 #348.9 | |
movdqa %xmm12, %xmm11 #348.9 | |
psrld $25, %xmm15 #348.9 | |
pslld $7, %xmm11 #348.9 | |
por %xmm11, %xmm15 #348.9 | |
movdqa %xmm12, %xmm11 #348.9 | |
pxor %xmm15, %xmm8 #348.9 | |
pand %xmm5, %xmm11 #348.9 | |
paddd %xmm8, %xmm9 #348.9 | |
movdqa %xmm12, %xmm8 #348.9 | |
pandn %xmm6, %xmm8 #348.9 | |
movdqa .L_2il0floatpacket.6747(%rip), %xmm15 #348.9 | |
pxor %xmm8, %xmm11 #348.9 | |
paddd %xmm11, %xmm15 #348.9 | |
movdqa %xmm10, %xmm11 #348.9 | |
paddd %xmm15, %xmm9 #348.9 | |
psrld $2, %xmm11 #348.9 | |
movdqa %xmm0, 3232(%rsp) #321.9 | |
paddd %xmm0, %xmm9 #348.9 | |
movdqa %xmm10, %xmm0 #348.9 | |
movdqa %xmm10, %xmm15 #348.9 | |
pslld $30, %xmm0 #348.9 | |
pslld $19, %xmm15 #348.9 | |
por %xmm0, %xmm11 #348.9 | |
movdqa %xmm10, %xmm0 #348.9 | |
psrld $13, %xmm0 #348.9 | |
movdqa %xmm10, %xmm8 #348.9 | |
por %xmm15, %xmm0 #348.9 | |
movdqa %xmm10, %xmm15 #348.9 | |
pxor %xmm0, %xmm11 #348.9 | |
movdqa %xmm10, %xmm0 #348.9 | |
psrld $22, %xmm15 #348.9 | |
pslld $10, %xmm0 #348.9 | |
pand %xmm1, %xmm8 #348.9 | |
por %xmm0, %xmm15 #348.9 | |
movdqa %xmm13, %xmm0 #348.9 | |
pxor %xmm15, %xmm11 #348.9 | |
pand %xmm10, %xmm0 #348.9 | |
movdqa %xmm8, %xmm15 #348.9 | |
pxor %xmm0, %xmm15 #348.9 | |
paddd %xmm9, %xmm7 #348.9 | |
pxor %xmm14, %xmm15 #348.9 | |
movdqa %xmm7, %xmm14 #349.9 | |
paddd %xmm15, %xmm11 #348.9 | |
movdqa %xmm7, %xmm15 #349.9 | |
paddd %xmm11, %xmm9 #348.9 | |
movdqa %xmm7, %xmm0 #349.9 | |
movdqa %xmm7, %xmm11 #349.9 | |
psrld $6, %xmm15 #349.9 | |
pslld $26, %xmm14 #349.9 | |
psrld $11, %xmm0 #349.9 | |
pslld $21, %xmm11 #349.9 | |
por %xmm14, %xmm15 #349.9 | |
por %xmm11, %xmm0 #349.9 | |
movdqa %xmm7, %xmm14 #349.9 | |
pxor %xmm0, %xmm15 #349.9 | |
movdqa %xmm7, %xmm0 #349.9 | |
psrld $25, %xmm14 #349.9 | |
pslld $7, %xmm0 #349.9 | |
por %xmm0, %xmm14 #349.9 | |
movdqa %xmm7, %xmm0 #349.9 | |
pxor %xmm14, %xmm15 #349.9 | |
movdqa %xmm7, %xmm14 #349.9 | |
pand %xmm12, %xmm14 #349.9 | |
pandn %xmm5, %xmm0 #349.9 | |
movdqa .L_2il0floatpacket.6748(%rip), %xmm11 #349.9 | |
pxor %xmm0, %xmm14 #349.9 | |
paddd %xmm15, %xmm6 #349.9 | |
paddd %xmm14, %xmm11 #349.9 | |
paddd %xmm11, %xmm6 #349.9 | |
movdqa %xmm9, %xmm0 #349.9 | |
movdqa %xmm2, 3248(%rsp) #322.9 | |
paddd %xmm2, %xmm6 #349.9 | |
movdqa %xmm9, %xmm2 #349.9 | |
pslld $30, %xmm0 #349.9 | |
psrld $2, %xmm2 #349.9 | |
movdqa %xmm9, %xmm15 #349.9 | |
por %xmm0, %xmm2 #349.9 | |
movdqa %xmm9, %xmm0 #349.9 | |
psrld $13, %xmm15 #349.9 | |
pslld $19, %xmm0 #349.9 | |
movdqa %xmm9, %xmm14 #349.9 | |
por %xmm0, %xmm15 #349.9 | |
movdqa %xmm9, %xmm11 #349.9 | |
movdqa %xmm9, %xmm0 #349.9 | |
pand %xmm10, %xmm14 #349.9 | |
pxor %xmm15, %xmm2 #349.9 | |
psrld $22, %xmm11 #349.9 | |
pslld $10, %xmm0 #349.9 | |
movdqa %xmm9, %xmm15 #349.9 | |
por %xmm0, %xmm11 #349.9 | |
pand %xmm1, %xmm15 #349.9 | |
movdqa %xmm14, %xmm0 #349.9 | |
pxor %xmm15, %xmm0 #349.9 | |
pxor %xmm11, %xmm2 #349.9 | |
pxor %xmm8, %xmm0 #349.9 | |
paddd %xmm6, %xmm13 #349.9 | |
paddd %xmm0, %xmm2 #349.9 | |
movdqa %xmm13, %xmm8 #350.9 | |
paddd %xmm2, %xmm6 #349.9 | |
movdqa %xmm13, %xmm0 #350.9 | |
movdqa %xmm13, %xmm2 #350.9 | |
movdqa %xmm13, %xmm11 #350.9 | |
psrld $6, %xmm8 #350.9 | |
pslld $26, %xmm0 #350.9 | |
psrld $11, %xmm2 #350.9 | |
pslld $21, %xmm11 #350.9 | |
por %xmm0, %xmm8 #350.9 | |
por %xmm11, %xmm2 #350.9 | |
pxor %xmm2, %xmm8 #350.9 | |
movdqa %xmm13, %xmm15 #350.9 | |
movdqa %xmm13, %xmm2 #350.9 | |
psrld $25, %xmm15 #350.9 | |
pslld $7, %xmm2 #350.9 | |
movdqa %xmm13, %xmm0 #350.9 | |
por %xmm2, %xmm15 #350.9 | |
movdqa %xmm13, %xmm2 #350.9 | |
pand %xmm7, %xmm0 #350.9 | |
pandn %xmm12, %xmm2 #350.9 | |
pxor %xmm15, %xmm8 #350.9 | |
pxor %xmm2, %xmm0 #350.9 | |
movdqa .L_2il0floatpacket.6749(%rip), %xmm2 #350.9 | |
paddd %xmm8, %xmm5 #350.9 | |
paddd %xmm0, %xmm2 #350.9 | |
movdqa %xmm6, %xmm11 #350.9 | |
paddd %xmm2, %xmm5 #350.9 | |
movdqa %xmm6, %xmm0 #350.9 | |
movdqa %xmm6, %xmm2 #350.9 | |
movdqa %xmm6, %xmm8 #350.9 | |
psrld $2, %xmm11 #350.9 | |
pslld $30, %xmm0 #350.9 | |
psrld $13, %xmm2 #350.9 | |
pslld $19, %xmm8 #350.9 | |
movdqa %xmm3, 3264(%rsp) #323.9 | |
paddd %xmm3, %xmm5 #350.9 | |
movdqa %xmm6, %xmm3 #350.9 | |
por %xmm0, %xmm11 #350.9 | |
por %xmm8, %xmm2 #350.9 | |
pand %xmm9, %xmm3 #350.9 | |
pxor %xmm2, %xmm11 #350.9 | |
movdqa %xmm6, %xmm15 #350.9 | |
movdqa %xmm6, %xmm2 #350.9 | |
movdqa %xmm6, %xmm0 #350.9 | |
psrld $22, %xmm15 #350.9 | |
pslld $10, %xmm2 #350.9 | |
pand %xmm10, %xmm0 #350.9 | |
movdqa %xmm3, %xmm8 #350.9 | |
por %xmm2, %xmm15 #350.9 | |
pxor %xmm0, %xmm8 #350.9 | |
paddd %xmm5, %xmm1 #350.9 | |
pxor %xmm15, %xmm11 #350.9 | |
pxor %xmm14, %xmm8 #350.9 | |
movdqa %xmm1, %xmm14 #351.9 | |
paddd %xmm8, %xmm11 #350.9 | |
movdqa %xmm1, %xmm0 #351.9 | |
movdqa %xmm1, %xmm2 #351.9 | |
movdqa %xmm1, %xmm8 #351.9 | |
psrld $6, %xmm14 #351.9 | |
pslld $26, %xmm0 #351.9 | |
psrld $11, %xmm2 #351.9 | |
pslld $21, %xmm8 #351.9 | |
paddd %xmm11, %xmm5 #350.9 | |
por %xmm0, %xmm14 #351.9 | |
por %xmm8, %xmm2 #351.9 | |
movdqa %xmm1, %xmm11 #351.9 | |
movdqa %xmm1, %xmm15 #351.9 | |
pxor %xmm2, %xmm14 #351.9 | |
psrld $25, %xmm11 #351.9 | |
pslld $7, %xmm15 #351.9 | |
movdqa %xmm1, %xmm0 #351.9 | |
movdqa %xmm1, %xmm2 #351.9 | |
por %xmm15, %xmm11 #351.9 | |
pand %xmm13, %xmm0 #351.9 | |
pandn %xmm7, %xmm2 #351.9 | |
pxor %xmm11, %xmm14 #351.9 | |
pxor %xmm2, %xmm0 #351.9 | |
paddd %xmm14, %xmm12 #351.9 | |
movdqa .L_2il0floatpacket.6750(%rip), %xmm2 #351.9 | |
movdqa %xmm5, %xmm8 #351.9 | |
paddd %xmm0, %xmm2 #351.9 | |
movdqa %xmm5, %xmm0 #351.9 | |
paddd %xmm2, %xmm12 #351.9 | |
psrld $2, %xmm8 #351.9 | |
pslld $30, %xmm0 #351.9 | |
movdqa %xmm5, %xmm2 #351.9 | |
movdqa %xmm5, %xmm11 #351.9 | |
paddd %xmm4, %xmm12 #351.9 | |
movdqa %xmm4, 3280(%rsp) #324.9 | |
movdqa %xmm5, %xmm4 #351.9 | |
por %xmm0, %xmm8 #351.9 | |
psrld $13, %xmm2 #351.9 | |
pslld $19, %xmm11 #351.9 | |
movdqa %xmm5, %xmm14 #351.9 | |
movdqa %xmm5, %xmm15 #351.9 | |
movdqa %xmm5, %xmm0 #351.9 | |
pand %xmm6, %xmm4 #351.9 | |
por %xmm11, %xmm2 #351.9 | |
psrld $22, %xmm14 #351.9 | |
pslld $10, %xmm15 #351.9 | |
pand %xmm9, %xmm0 #351.9 | |
pxor %xmm2, %xmm8 #351.9 | |
movdqa %xmm4, 3408(%rsp) #351.9 | |
por %xmm15, %xmm14 #351.9 | |
pxor %xmm0, %xmm4 #351.9 | |
paddd %xmm12, %xmm10 #351.9 | |
pxor %xmm14, %xmm8 #351.9 | |
pxor %xmm3, %xmm4 #351.9 | |
paddd %xmm4, %xmm8 #351.9 | |
movdqa %xmm10, %xmm3 #352.9 | |
movdqa %xmm10, %xmm0 #352.9 | |
movdqa %xmm10, %xmm2 #352.9 | |
movdqa %xmm10, %xmm4 #352.9 | |
psrld $6, %xmm3 #352.9 | |
pslld $26, %xmm0 #352.9 | |
psrld $11, %xmm2 #352.9 | |
pslld $21, %xmm4 #352.9 | |
paddd %xmm8, %xmm12 #351.9 | |
por %xmm0, %xmm3 #352.9 | |
por %xmm4, %xmm2 #352.9 | |
movdqa %xmm10, %xmm8 #352.9 | |
movdqa %xmm10, %xmm11 #352.9 | |
pxor %xmm2, %xmm3 #352.9 | |
psrld $25, %xmm8 #352.9 | |
pslld $7, %xmm11 #352.9 | |
movdqa %xmm10, %xmm14 #352.9 | |
movdqa %xmm12, %xmm2 #352.9 | |
por %xmm11, %xmm8 #352.9 | |
movdqa %xmm13, 3328(%rsp) #349.9 | |
pand %xmm1, %xmm14 #352.9 | |
movdqa %xmm5, 3376(%rsp) #350.9 | |
pand %xmm5, %xmm2 #352.9 | |
movdqa %xmm10, 3392(%rsp) #351.9 | |
pandn %xmm13, %xmm10 #352.9 | |
movdqa %xmm12, %xmm5 #352.9 | |
movdqa %xmm12, %xmm13 #352.9 | |
movdqa %xmm1, 3360(%rsp) #350.9 | |
pxor %xmm8, %xmm3 #352.9 | |
movdqa .L_2il0floatpacket.6751(%rip), %xmm1 #352.9 | |
pxor %xmm10, %xmm14 #352.9 | |
psrld $2, %xmm5 #352.9 | |
pslld $30, %xmm13 #352.9 | |
paddd %xmm3, %xmm7 #352.9 | |
paddd %xmm14, %xmm1 #352.9 | |
por %xmm13, %xmm5 #352.9 | |
movdqa %xmm12, %xmm15 #352.9 | |
movdqa %xmm12, %xmm13 #352.9 | |
paddd %xmm1, %xmm7 #352.9 | |
psrld $13, %xmm15 #352.9 | |
pslld $19, %xmm13 #352.9 | |
movdqa %xmm12, %xmm0 #352.9 | |
movdqa %xmm12, %xmm1 #352.9 | |
por %xmm13, %xmm15 #352.9 | |
psrld $22, %xmm0 #352.9 | |
pslld $10, %xmm1 #352.9 | |
pxor %xmm15, %xmm5 #352.9 | |
movdqa %xmm12, 3424(%rsp) #351.9 | |
por %xmm1, %xmm0 #352.9 | |
paddd 3296(%rsp), %xmm7 #352.9 | |
pand %xmm6, %xmm12 #352.9 | |
movdqa %xmm2, 3472(%rsp) #352.9 | |
paddd %xmm7, %xmm9 #352.9 | |
pxor %xmm0, %xmm5 #352.9 | |
pxor %xmm12, %xmm2 #352.9 | |
movdqa %xmm6, 3344(%rsp) #349.9 | |
movdqa %xmm7, 3440(%rsp) #352.9 | |
movdqa %xmm9, 3456(%rsp) #352.9 | |
movdqa %xmm5, 3488(%rsp) #352.9 | |
movdqa %xmm2, 3504(%rsp) #352.9 | |
# LOE | |
..B2.14: # Preds ..B2.15 | |
movdqa 3456(%rsp), %xmm0 #353.9 | |
movdqa 3504(%rsp), %xmm8 #352.9 | |
movdqa %xmm0, %xmm7 #353.9 | |
movdqa %xmm0, %xmm5 #353.9 | |
movdqa %xmm0, %xmm11 #353.9 | |
movdqa %xmm0, %xmm13 #353.9 | |
psrld $6, %xmm7 #353.9 | |
pxor 3408(%rsp), %xmm8 #352.9 | |
pslld $26, %xmm5 #353.9 | |
movdqa 3488(%rsp), %xmm12 #352.9 | |
psrld $11, %xmm11 #353.9 | |
pslld $21, %xmm13 #353.9 | |
movdqa %xmm0, %xmm3 #353.9 | |
movdqa %xmm0, %xmm10 #353.9 | |
paddd %xmm8, %xmm12 #352.9 | |
movdqa 3440(%rsp), %xmm1 #352.9 | |
por %xmm5, %xmm7 #353.9 | |
movdqa 3392(%rsp), %xmm9 #353.9 | |
por %xmm13, %xmm11 #353.9 | |
movdqa 3360(%rsp), %xmm15 #353.9 | |
psrld $25, %xmm3 #353.9 | |
pslld $7, %xmm10 #353.9 | |
movdqa %xmm0, %xmm14 #353.9 | |
movdqa %xmm0, %xmm4 #353.9 | |
paddd %xmm12, %xmm1 #352.9 | |
pxor %xmm11, %xmm7 #353.9 | |
por %xmm10, %xmm3 #353.9 | |
pand %xmm9, %xmm14 #353.9 | |
pandn %xmm15, %xmm4 #353.9 | |
movdqa 3328(%rsp), %xmm2 #353.9 | |
pxor %xmm3, %xmm7 #353.9 | |
movdqa 3424(%rsp), %xmm6 #353.9 | |
pxor %xmm4, %xmm14 #353.9 | |
movdqa .L_2il0floatpacket.6752(%rip), %xmm8 #353.9 | |
movdqa %xmm1, %xmm12 #353.9 | |
movdqa %xmm1, %xmm5 #353.9 | |
movdqa %xmm1, %xmm11 #353.9 | |
movdqa %xmm1, %xmm13 #353.9 | |
paddd %xmm7, %xmm2 #353.9 | |
movdqa 3376(%rsp), %xmm4 #353.9 | |
paddd %xmm14, %xmm8 #353.9 | |
movdqa %xmm6, %xmm7 #353.9 | |
psrld $2, %xmm12 #353.9 | |
pslld $30, %xmm5 #353.9 | |
psrld $13, %xmm11 #353.9 | |
pslld $19, %xmm13 #353.9 | |
paddd %xmm8, %xmm2 #353.9 | |
pand %xmm1, %xmm7 #353.9 | |
por %xmm5, %xmm12 #353.9 | |
por %xmm13, %xmm11 #353.9 | |
movdqa %xmm1, %xmm10 #353.9 | |
movdqa %xmm1, %xmm8 #353.9 | |
movdqa %xmm4, %xmm5 #353.9 | |
pxor %xmm11, %xmm12 #353.9 | |
psrld $22, %xmm10 #353.9 | |
pslld $10, %xmm8 #353.9 | |
pand %xmm1, %xmm5 #353.9 | |
movdqa %xmm7, %xmm11 #353.9 | |
por %xmm8, %xmm10 #353.9 | |
movdqa 3312(%rsp), %xmm3 #353.9 | |
pxor %xmm5, %xmm11 #353.9 | |
movdqa 3472(%rsp), %xmm13 #353.9 | |
paddd %xmm2, %xmm3 #353.9 | |
movdqa 3344(%rsp), %xmm2 #353.9 | |
pxor %xmm10, %xmm12 #353.9 | |
pxor %xmm11, %xmm13 #353.9 | |
paddd %xmm3, %xmm2 #353.9 | |
paddd %xmm13, %xmm12 #353.9 | |
movdqa %xmm2, %xmm14 #354.9 | |
paddd %xmm12, %xmm3 #353.9 | |
movdqa %xmm2, %xmm8 #354.9 | |
movdqa %xmm2, %xmm12 #354.9 | |
movdqa %xmm2, %xmm5 #354.9 | |
psrld $6, %xmm14 #354.9 | |
pslld $26, %xmm8 #354.9 | |
psrld $11, %xmm12 #354.9 | |
pslld $21, %xmm5 #354.9 | |
movdqa %xmm2, %xmm11 #354.9 | |
movdqa %xmm2, %xmm13 #354.9 | |
por %xmm8, %xmm14 #354.9 | |
por %xmm5, %xmm12 #354.9 | |
psrld $25, %xmm11 #354.9 | |
pslld $7, %xmm13 #354.9 | |
movdqa %xmm0, %xmm5 #354.9 | |
movdqa %xmm2, %xmm8 #354.9 | |
pxor %xmm12, %xmm14 #354.9 | |
por %xmm13, %xmm11 #354.9 | |
pand %xmm2, %xmm5 #354.9 | |
pandn %xmm9, %xmm8 #354.9 | |
pxor %xmm11, %xmm14 #354.9 | |
pxor %xmm8, %xmm5 #354.9 | |
movdqa .L_2il0floatpacket.6753(%rip), %xmm8 #354.9 | |
paddd %xmm14, %xmm15 #354.9 | |
paddd %xmm5, %xmm8 #354.9 | |
movdqa %xmm3, %xmm12 #354.9 | |
movdqa %xmm3, %xmm11 #354.9 | |
movdqa %xmm3, %xmm13 #354.9 | |
movdqa %xmm3, %xmm10 #354.9 | |
paddd %xmm8, %xmm15 #354.9 | |
movdqa 832(%rsp), %xmm5 #354.9 | |
movdqa %xmm3, %xmm8 #354.9 | |
psrld $2, %xmm12 #354.9 | |
pslld $30, %xmm11 #354.9 | |
psrld $13, %xmm13 #354.9 | |
pslld $19, %xmm10 #354.9 | |
paddd %xmm15, %xmm5 #354.9 | |
pand %xmm1, %xmm8 #354.9 | |
por %xmm11, %xmm12 #354.9 | |
por %xmm10, %xmm13 #354.9 | |
movdqa %xmm3, %xmm14 #354.9 | |
movdqa %xmm3, %xmm15 #354.9 | |
movdqa %xmm6, %xmm11 #354.9 | |
pxor %xmm13, %xmm12 #354.9 | |
psrld $22, %xmm14 #354.9 | |
pslld $10, %xmm15 #354.9 | |
pand %xmm3, %xmm11 #354.9 | |
movdqa %xmm8, %xmm13 #354.9 | |
por %xmm15, %xmm14 #354.9 | |
pxor %xmm11, %xmm13 #354.9 | |
pxor %xmm14, %xmm12 #354.9 | |
pxor %xmm7, %xmm13 #354.9 | |
paddd %xmm5, %xmm4 #354.9 | |
paddd %xmm13, %xmm12 #354.9 | |
paddd %xmm12, %xmm5 #354.9 | |
movdqa %xmm4, %xmm7 #355.9 | |
movdqa %xmm4, %xmm12 #355.9 | |
movdqa %xmm4, %xmm15 #355.9 | |
movdqa %xmm4, %xmm11 #355.9 | |
psrld $6, %xmm7 #355.9 | |
pslld $26, %xmm12 #355.9 | |
psrld $11, %xmm15 #355.9 | |
pslld $21, %xmm11 #355.9 | |
movdqa %xmm4, %xmm13 #355.9 | |
movdqa %xmm4, %xmm10 #355.9 | |
por %xmm12, %xmm7 #355.9 | |
por %xmm11, %xmm15 #355.9 | |
psrld $25, %xmm13 #355.9 | |
pslld $7, %xmm10 #355.9 | |
pxor %xmm15, %xmm7 #355.9 | |
por %xmm10, %xmm13 #355.9 | |
movdqa %xmm4, %xmm10 #355.9 | |
pxor %xmm13, %xmm7 #355.9 | |
pandn %xmm0, %xmm10 #355.9 | |
paddd %xmm7, %xmm9 #355.9 | |
movdqa %xmm4, %xmm7 #355.9 | |
pand %xmm2, %xmm7 #355.9 | |
movdqa %xmm5, %xmm15 #355.9 | |
movdqa .L_2il0floatpacket.6754(%rip), %xmm12 #355.9 | |
pxor %xmm10, %xmm7 #355.9 | |
paddd %xmm7, %xmm12 #355.9 | |
movdqa %xmm5, %xmm11 #355.9 | |
movdqa 848(%rsp), %xmm7 #355.9 | |
paddd %xmm12, %xmm9 #355.9 | |
paddd %xmm9, %xmm7 #355.9 | |
movdqa %xmm5, %xmm9 #355.9 | |
psrld $2, %xmm15 #355.9 | |
pslld $30, %xmm11 #355.9 | |
movdqa %xmm5, %xmm14 #355.9 | |
movdqa %xmm5, %xmm12 #355.9 | |
pand %xmm3, %xmm9 #355.9 | |
por %xmm11, %xmm15 #355.9 | |
psrld $13, %xmm14 #355.9 | |
pslld $19, %xmm12 #355.9 | |
movdqa %xmm5, %xmm11 #355.9 | |
movdqa %xmm5, %xmm13 #355.9 | |
movdqa %xmm5, %xmm10 #355.9 | |
por %xmm12, %xmm14 #355.9 | |
psrld $22, %xmm11 #355.9 | |
pslld $10, %xmm13 #355.9 | |
pand %xmm1, %xmm10 #355.9 | |
movdqa %xmm9, %xmm12 #355.9 | |
pxor %xmm14, %xmm15 #355.9 | |
por %xmm13, %xmm11 #355.9 | |
pxor %xmm10, %xmm12 #355.9 | |
pxor %xmm11, %xmm15 #355.9 | |
pxor %xmm8, %xmm12 #355.9 | |
paddd %xmm7, %xmm6 #355.9 | |
paddd %xmm12, %xmm15 #355.9 | |
movdqa %xmm6, %xmm8 #356.9 | |
paddd %xmm15, %xmm7 #355.9 | |
movdqa %xmm6, %xmm12 #356.9 | |
movdqa %xmm6, %xmm15 #356.9 | |
movdqa %xmm6, %xmm11 #356.9 | |
psrld $6, %xmm8 #356.9 | |
pslld $26, %xmm12 #356.9 | |
psrld $11, %xmm15 #356.9 | |
pslld $21, %xmm11 #356.9 | |
movdqa %xmm6, %xmm13 #356.9 | |
movdqa %xmm6, %xmm10 #356.9 | |
por %xmm12, %xmm8 #356.9 | |
por %xmm11, %xmm15 #356.9 | |
psrld $25, %xmm13 #356.9 | |
pslld $7, %xmm10 #356.9 | |
pxor %xmm15, %xmm8 #356.9 | |
por %xmm10, %xmm13 #356.9 | |
pxor %xmm13, %xmm8 #356.9 | |
movdqa %xmm6, %xmm10 #356.9 | |
paddd %xmm8, %xmm0 #356.9 | |
movdqa %xmm6, %xmm8 #356.9 | |
pand %xmm4, %xmm8 #356.9 | |
pandn %xmm2, %xmm10 #356.9 | |
movdqa .L_2il0floatpacket.6755(%rip), %xmm12 #356.9 | |
pxor %xmm10, %xmm8 #356.9 | |
paddd %xmm8, %xmm12 #356.9 | |
movdqa %xmm7, %xmm14 #356.9 | |
movdqa 864(%rsp), %xmm8 #356.9 | |
paddd %xmm12, %xmm0 #356.9 | |
movdqa %xmm7, %xmm12 #356.9 | |
movdqa %xmm7, %xmm15 #356.9 | |
movdqa %xmm7, %xmm11 #356.9 | |
paddd %xmm0, %xmm8 #356.9 | |
movdqa %xmm7, %xmm0 #356.9 | |
psrld $2, %xmm14 #356.9 | |
pslld $30, %xmm12 #356.9 | |
psrld $13, %xmm15 #356.9 | |
pslld $19, %xmm11 #356.9 | |
pand %xmm5, %xmm0 #356.9 | |
por %xmm12, %xmm14 #356.9 | |
por %xmm11, %xmm15 #356.9 | |
movdqa %xmm7, %xmm13 #356.9 | |
movdqa %xmm7, %xmm10 #356.9 | |
movdqa %xmm7, %xmm12 #356.9 | |
pxor %xmm15, %xmm14 #356.9 | |
psrld $22, %xmm13 #356.9 | |
pslld $10, %xmm10 #356.9 | |
pand %xmm3, %xmm12 #356.9 | |
movdqa %xmm0, %xmm15 #356.9 | |
por %xmm10, %xmm13 #356.9 | |
pxor %xmm12, %xmm15 #356.9 | |
paddd %xmm8, %xmm1 #356.9 | |
pxor %xmm13, %xmm14 #356.9 | |
pxor %xmm9, %xmm15 #356.9 | |
movdqa %xmm1, %xmm9 #357.9 | |
paddd %xmm15, %xmm14 #356.9 | |
movdqa %xmm1, %xmm12 #357.9 | |
movdqa %xmm1, %xmm15 #357.9 | |
movdqa %xmm1, %xmm11 #357.9 | |
psrld $6, %xmm9 #357.9 | |
pslld $26, %xmm12 #357.9 | |
psrld $11, %xmm15 #357.9 | |
pslld $21, %xmm11 #357.9 | |
movdqa %xmm1, %xmm13 #357.9 | |
movdqa %xmm1, %xmm10 #357.9 | |
por %xmm12, %xmm9 #357.9 | |
por %xmm11, %xmm15 #357.9 | |
psrld $25, %xmm13 #357.9 | |
pslld $7, %xmm10 #357.9 | |
pxor %xmm15, %xmm9 #357.9 | |
por %xmm10, %xmm13 #357.9 | |
pxor %xmm13, %xmm9 #357.9 | |
movdqa %xmm1, %xmm10 #357.9 | |
paddd %xmm9, %xmm2 #357.9 | |
movdqa %xmm1, %xmm9 #357.9 | |
pand %xmm6, %xmm9 #357.9 | |
pandn %xmm4, %xmm10 #357.9 | |
movdqa .L_2il0floatpacket.6756(%rip), %xmm12 #357.9 | |
pxor %xmm10, %xmm9 #357.9 | |
paddd %xmm14, %xmm8 #356.9 | |
paddd %xmm9, %xmm12 #357.9 | |
movdqa 880(%rsp), %xmm9 #357.9 | |
paddd %xmm12, %xmm2 #357.9 | |
movdqa %xmm8, %xmm14 #357.9 | |
movdqa %xmm8, %xmm12 #357.9 | |
movdqa %xmm8, %xmm15 #357.9 | |
movdqa %xmm8, %xmm11 #357.9 | |
paddd %xmm2, %xmm9 #357.9 | |
movdqa %xmm8, %xmm2 #357.9 | |
psrld $2, %xmm14 #357.9 | |
pslld $30, %xmm12 #357.9 | |
psrld $13, %xmm15 #357.9 | |
pslld $19, %xmm11 #357.9 | |
pand %xmm7, %xmm2 #357.9 | |
por %xmm12, %xmm14 #357.9 | |
por %xmm11, %xmm15 #357.9 | |
movdqa %xmm8, %xmm13 #357.9 | |
movdqa %xmm8, %xmm10 #357.9 | |
movdqa %xmm8, %xmm12 #357.9 | |
pxor %xmm15, %xmm14 #357.9 | |
psrld $22, %xmm13 #357.9 | |
pslld $10, %xmm10 #357.9 | |
pand %xmm5, %xmm12 #357.9 | |
movdqa %xmm2, %xmm15 #357.9 | |
por %xmm10, %xmm13 #357.9 | |
pxor %xmm12, %xmm15 #357.9 | |
paddd %xmm9, %xmm3 #357.9 | |
pxor %xmm13, %xmm14 #357.9 | |
pxor %xmm0, %xmm15 #357.9 | |
paddd %xmm15, %xmm14 #357.9 | |
movdqa %xmm3, %xmm0 #358.9 | |
movdqa %xmm3, %xmm12 #358.9 | |
movdqa %xmm3, %xmm15 #358.9 | |
movdqa %xmm3, %xmm11 #358.9 | |
psrld $6, %xmm0 #358.9 | |
pslld $26, %xmm12 #358.9 | |
psrld $11, %xmm15 #358.9 | |
pslld $21, %xmm11 #358.9 | |
movdqa %xmm3, %xmm13 #358.9 | |
movdqa %xmm3, %xmm10 #358.9 | |
por %xmm12, %xmm0 #358.9 | |
por %xmm11, %xmm15 #358.9 | |
psrld $25, %xmm13 #358.9 | |
pslld $7, %xmm10 #358.9 | |
pxor %xmm15, %xmm0 #358.9 | |
por %xmm10, %xmm13 #358.9 | |
movdqa %xmm3, %xmm10 #358.9 | |
pxor %xmm13, %xmm0 #358.9 | |
pandn %xmm6, %xmm10 #358.9 | |
paddd %xmm0, %xmm4 #358.9 | |
movdqa %xmm3, %xmm0 #358.9 | |
pand %xmm1, %xmm0 #358.9 | |
paddd %xmm14, %xmm9 #357.9 | |
movdqa .L_2il0floatpacket.6757(%rip), %xmm12 #358.9 | |
pxor %xmm10, %xmm0 #358.9 | |
paddd %xmm0, %xmm12 #358.9 | |
movdqa %xmm9, %xmm14 #358.9 | |
paddd %xmm12, %xmm4 #358.9 | |
movdqa %xmm9, %xmm15 #358.9 | |
movdqa 896(%rsp), %xmm12 #358.9 | |
movdqa %xmm9, %xmm11 #358.9 | |
movdqa %xmm9, %xmm13 #358.9 | |
paddd %xmm4, %xmm12 #358.9 | |
movdqa %xmm9, %xmm4 #358.9 | |
psrld $2, %xmm14 #358.9 | |
pslld $30, %xmm15 #358.9 | |
psrld $13, %xmm11 #358.9 | |
pslld $19, %xmm13 #358.9 | |
pand %xmm8, %xmm4 #358.9 | |
por %xmm15, %xmm14 #358.9 | |
por %xmm13, %xmm11 #358.9 | |
movdqa %xmm9, %xmm10 #358.9 | |
movdqa %xmm9, %xmm0 #358.9 | |
movdqa %xmm9, %xmm15 #358.9 | |
pxor %xmm11, %xmm14 #358.9 | |
psrld $22, %xmm10 #358.9 | |
pslld $10, %xmm0 #358.9 | |
pand %xmm7, %xmm15 #358.9 | |
movdqa %xmm4, %xmm11 #358.9 | |
por %xmm0, %xmm10 #358.9 | |
pxor %xmm15, %xmm11 #358.9 | |
paddd %xmm12, %xmm5 #358.9 | |
pxor %xmm10, %xmm14 #358.9 | |
pxor %xmm2, %xmm11 #358.9 | |
movdqa %xmm5, %xmm2 #359.9 | |
paddd %xmm11, %xmm14 #358.9 | |
movdqa %xmm5, %xmm15 #359.9 | |
movdqa %xmm5, %xmm11 #359.9 | |
movdqa %xmm5, %xmm13 #359.9 | |
paddd %xmm14, %xmm12 #358.9 | |
psrld $6, %xmm2 #359.9 | |
pslld $26, %xmm15 #359.9 | |
psrld $11, %xmm11 #359.9 | |
pslld $21, %xmm13 #359.9 | |
movdqa %xmm5, %xmm10 #359.9 | |
movdqa %xmm5, %xmm14 #359.9 | |
por %xmm15, %xmm2 #359.9 | |
por %xmm13, %xmm11 #359.9 | |
psrld $25, %xmm10 #359.9 | |
pslld $7, %xmm14 #359.9 | |
pxor %xmm11, %xmm2 #359.9 | |
por %xmm14, %xmm10 #359.9 | |
movdqa %xmm5, %xmm0 #359.9 | |
pxor %xmm10, %xmm2 #359.9 | |
pand %xmm3, %xmm0 #359.9 | |
paddd %xmm2, %xmm6 #359.9 | |
movdqa %xmm5, %xmm2 #359.9 | |
pandn %xmm1, %xmm2 #359.9 | |
movdqa %xmm12, %xmm13 #359.9 | |
movdqa .L_2il0floatpacket.6758(%rip), %xmm15 #359.9 | |
pxor %xmm2, %xmm0 #359.9 | |
movdqa %xmm12, %xmm10 #359.9 | |
paddd %xmm0, %xmm15 #359.9 | |
psrld $2, %xmm13 #359.9 | |
pslld $30, %xmm10 #359.9 | |
movdqa %xmm12, %xmm0 #359.9 | |
movdqa %xmm12, %xmm2 #359.9 | |
paddd %xmm15, %xmm6 #359.9 | |
por %xmm10, %xmm13 #359.9 | |
psrld $13, %xmm0 #359.9 | |
pslld $19, %xmm2 #359.9 | |
movdqa %xmm12, %xmm15 #359.9 | |
movdqa %xmm12, %xmm10 #359.9 | |
movdqa 912(%rsp), %xmm11 #359.9 | |
movdqa %xmm12, %xmm14 #359.9 | |
por %xmm2, %xmm0 #359.9 | |
psrld $22, %xmm15 #359.9 | |
pslld $10, %xmm10 #359.9 | |
paddd %xmm6, %xmm11 #359.9 | |
pand %xmm9, %xmm14 #359.9 | |
pxor %xmm0, %xmm13 #359.9 | |
por %xmm10, %xmm15 #359.9 | |
movdqa %xmm12, %xmm6 #359.9 | |
pxor %xmm15, %xmm13 #359.9 | |
pand %xmm8, %xmm6 #359.9 | |
movdqa %xmm14, %xmm15 #359.9 | |
paddd %xmm11, %xmm7 #359.9 | |
pxor %xmm6, %xmm15 #359.9 | |
movdqa %xmm7, %xmm10 #360.9 | |
pxor %xmm4, %xmm15 #359.9 | |
movdqa %xmm7, %xmm4 #360.9 | |
paddd %xmm15, %xmm13 #359.9 | |
movdqa %xmm7, %xmm15 #360.9 | |
paddd %xmm13, %xmm11 #359.9 | |
movdqa %xmm7, %xmm13 #360.9 | |
psrld $6, %xmm4 #360.9 | |
pslld $26, %xmm15 #360.9 | |
psrld $11, %xmm13 #360.9 | |
pslld $21, %xmm10 #360.9 | |
movdqa %xmm7, %xmm0 #360.9 | |
movdqa %xmm7, %xmm2 #360.9 | |
por %xmm15, %xmm4 #360.9 | |
por %xmm10, %xmm13 #360.9 | |
psrld $25, %xmm0 #360.9 | |
pslld $7, %xmm2 #360.9 | |
pxor %xmm13, %xmm4 #360.9 | |
por %xmm2, %xmm0 #360.9 | |
pxor %xmm0, %xmm4 #360.9 | |
movdqa %xmm7, %xmm0 #360.9 | |
movdqa %xmm7, %xmm2 #360.9 | |
pand %xmm5, %xmm0 #360.9 | |
pandn %xmm3, %xmm2 #360.9 | |
paddd %xmm4, %xmm1 #360.9 | |
movdqa .L_2il0floatpacket.6759(%rip), %xmm15 #360.9 | |
pxor %xmm2, %xmm0 #360.9 | |
paddd %xmm0, %xmm15 #360.9 | |
movdqa %xmm11, %xmm0 #360.9 | |
movdqa 928(%rsp), %xmm10 #360.9 | |
paddd %xmm15, %xmm1 #360.9 | |
movdqa %xmm11, %xmm2 #360.9 | |
movdqa %xmm11, %xmm4 #360.9 | |
movdqa %xmm11, %xmm6 #360.9 | |
paddd %xmm1, %xmm10 #360.9 | |
psrld $2, %xmm0 #360.9 | |
pslld $30, %xmm2 #360.9 | |
psrld $13, %xmm4 #360.9 | |
pslld $19, %xmm6 #360.9 | |
movdqa %xmm11, %xmm15 #360.9 | |
movdqa %xmm11, %xmm1 #360.9 | |
movdqa %xmm11, %xmm13 #360.9 | |
por %xmm2, %xmm0 #360.9 | |
por %xmm6, %xmm4 #360.9 | |
psrld $22, %xmm15 #360.9 | |
pslld $10, %xmm1 #360.9 | |
pand %xmm12, %xmm13 #360.9 | |
pxor %xmm4, %xmm0 #360.9 | |
por %xmm1, %xmm15 #360.9 | |
movdqa %xmm11, %xmm2 #360.9 | |
pxor %xmm15, %xmm0 #360.9 | |
pand %xmm9, %xmm2 #360.9 | |
movdqa %xmm13, %xmm15 #360.9 | |
pxor %xmm2, %xmm15 #360.9 | |
paddd %xmm10, %xmm8 #360.9 | |
pxor %xmm14, %xmm15 #360.9 | |
movdqa %xmm8, %xmm14 #361.9 | |
paddd %xmm15, %xmm0 #360.9 | |
movdqa %xmm8, %xmm15 #361.9 | |
paddd %xmm0, %xmm10 #360.9 | |
movdqa %xmm8, %xmm1 #361.9 | |
movdqa %xmm8, %xmm0 #361.9 | |
psrld $6, %xmm14 #361.9 | |
pslld $26, %xmm15 #361.9 | |
psrld $11, %xmm1 #361.9 | |
pslld $21, %xmm0 #361.9 | |
movdqa %xmm8, %xmm2 #361.9 | |
movdqa %xmm8, %xmm4 #361.9 | |
por %xmm15, %xmm14 #361.9 | |
por %xmm0, %xmm1 #361.9 | |
psrld $25, %xmm2 #361.9 | |
pslld $7, %xmm4 #361.9 | |
pxor %xmm1, %xmm14 #361.9 | |
por %xmm4, %xmm2 #361.9 | |
movdqa %xmm8, %xmm0 #361.9 | |
movdqa %xmm8, %xmm1 #361.9 | |
pxor %xmm2, %xmm14 #361.9 | |
pand %xmm7, %xmm0 #361.9 | |
pandn %xmm5, %xmm1 #361.9 | |
movdqa .L_2il0floatpacket.6760(%rip), %xmm15 #361.9 | |
paddd %xmm14, %xmm3 #361.9 | |
pxor %xmm1, %xmm0 #361.9 | |
movdqa %xmm10, %xmm6 #361.9 | |
paddd %xmm0, %xmm15 #361.9 | |
movdqa %xmm3, %xmm0 #361.9 | |
movdqa 944(%rsp), %xmm3 #361.9 | |
paddd %xmm15, %xmm0 #361.9 | |
paddd %xmm0, %xmm3 #361.9 | |
movdqa %xmm10, %xmm15 #361.9 | |
movdqa %xmm10, %xmm14 #361.9 | |
movdqa %xmm10, %xmm0 #361.9 | |
movdqa %xmm10, %xmm1 #361.9 | |
psrld $2, %xmm6 #361.9 | |
pslld $30, %xmm15 #361.9 | |
psrld $13, %xmm14 #361.9 | |
pslld $19, %xmm0 #361.9 | |
pand %xmm11, %xmm1 #361.9 | |
por %xmm15, %xmm6 #361.9 | |
por %xmm0, %xmm14 #361.9 | |
movdqa %xmm10, %xmm2 #361.9 | |
movdqa %xmm10, %xmm4 #361.9 | |
movdqa %xmm10, %xmm15 #361.9 | |
pxor %xmm14, %xmm6 #361.9 | |
psrld $22, %xmm2 #361.9 | |
pslld $10, %xmm4 #361.9 | |
pand %xmm12, %xmm15 #361.9 | |
movdqa %xmm1, %xmm14 #361.9 | |
por %xmm4, %xmm2 #361.9 | |
pxor %xmm15, %xmm14 #361.9 | |
movdqa 3216(%rsp), %xmm15 #363.14 | |
pxor %xmm2, %xmm6 #361.9 | |
pxor %xmm13, %xmm14 #361.9 | |
movdqa %xmm15, %xmm13 #363.14 | |
paddd %xmm14, %xmm6 #361.9 | |
movdqa %xmm15, %xmm14 #363.14 | |
movdqa %xmm15, %xmm0 #363.14 | |
movdqa %xmm15, %xmm2 #363.14 | |
psrld $7, %xmm13 #363.14 | |
pslld $25, %xmm14 #363.14 | |
psrld $18, %xmm0 #363.14 | |
pslld $14, %xmm2 #363.14 | |
por %xmm14, %xmm13 #363.14 | |
por %xmm2, %xmm0 #363.14 | |
movdqa %xmm15, %xmm4 #363.14 | |
pxor %xmm0, %xmm13 #363.14 | |
psrld $3, %xmm4 #363.14 | |
paddd %xmm3, %xmm9 #361.9 | |
paddd %xmm6, %xmm3 #361.9 | |
pxor %xmm4, %xmm13 #363.14 | |
movdqa 3200(%rsp), %xmm6 #363.14 | |
movdqa %xmm9, %xmm0 #364.9 | |
paddd %xmm13, %xmm6 #363.14 | |
movdqa %xmm9, %xmm13 #364.9 | |
movdqa 1136(%rsp), %xmm14 #363.14 | |
psrld $6, %xmm13 #364.9 | |
pslld $26, %xmm0 #364.9 | |
movdqa %xmm9, %xmm2 #364.9 | |
movdqa %xmm9, %xmm4 #364.9 | |
paddd %xmm6, %xmm14 #363.14 | |
por %xmm0, %xmm13 #364.9 | |
psrld $11, %xmm2 #364.9 | |
pslld $21, %xmm4 #364.9 | |
movdqa %xmm9, %xmm0 #364.9 | |
movdqa %xmm9, %xmm6 #364.9 | |
por %xmm4, %xmm2 #364.9 | |
psrld $25, %xmm0 #364.9 | |
pslld $7, %xmm6 #364.9 | |
pxor %xmm2, %xmm13 #364.9 | |
por %xmm6, %xmm0 #364.9 | |
pxor %xmm0, %xmm13 #364.9 | |
movdqa %xmm9, %xmm0 #364.9 | |
movdqa %xmm9, %xmm2 #364.9 | |
pand %xmm8, %xmm0 #364.9 | |
pandn %xmm7, %xmm2 #364.9 | |
paddd %xmm13, %xmm5 #364.9 | |
movdqa .L_2il0floatpacket.6761(%rip), %xmm13 #364.9 | |
pxor %xmm2, %xmm0 #364.9 | |
paddd %xmm0, %xmm13 #364.9 | |
movdqa %xmm3, %xmm0 #364.9 | |
paddd %xmm13, %xmm5 #364.9 | |
movdqa %xmm3, %xmm13 #364.9 | |
movdqa %xmm10, 3552(%rsp) #360.9 | |
pand %xmm10, %xmm13 #364.9 | |
movdqa %xmm3, %xmm10 #364.9 | |
movdqa %xmm3, %xmm2 #364.9 | |
movdqa %xmm3, %xmm4 #364.9 | |
paddd %xmm14, %xmm5 #364.9 | |
movdqa %xmm14, 3600(%rsp) #363.14 | |
psrld $2, %xmm10 #364.9 | |
pslld $30, %xmm0 #364.9 | |
psrld $13, %xmm2 #364.9 | |
pslld $19, %xmm4 #364.9 | |
movdqa %xmm3, %xmm6 #364.9 | |
movdqa %xmm3, %xmm14 #364.9 | |
por %xmm0, %xmm10 #364.9 | |
movdqa %xmm3, 3584(%rsp) #361.9 | |
por %xmm4, %xmm2 #364.9 | |
psrld $22, %xmm6 #364.9 | |
pslld $10, %xmm14 #364.9 | |
pand %xmm11, %xmm3 #364.9 | |
pxor %xmm2, %xmm10 #364.9 | |
movdqa %xmm13, 3632(%rsp) #364.9 | |
por %xmm14, %xmm6 #364.9 | |
pxor %xmm3, %xmm13 #364.9 | |
pxor %xmm6, %xmm10 #364.9 | |
pxor %xmm1, %xmm13 #364.9 | |
paddd %xmm5, %xmm12 #364.9 | |
paddd %xmm13, %xmm10 #364.9 | |
movdqa %xmm12, %xmm6 #366.9 | |
paddd %xmm10, %xmm5 #364.9 | |
movdqa %xmm12, %xmm10 #366.9 | |
movdqa %xmm5, 3648(%rsp) #364.9 | |
movdqa %xmm12, %xmm5 #366.9 | |
movdqa 3232(%rsp), %xmm3 #365.14 | |
movdqa %xmm12, %xmm13 #366.9 | |
movdqa %xmm11, 3520(%rsp) #359.9 | |
movdqa %xmm3, %xmm11 #365.14 | |
movdqa %xmm3, %xmm0 #365.14 | |
movdqa %xmm3, %xmm1 #365.14 | |
movdqa %xmm3, %xmm2 #365.14 | |
psrld $6, %xmm5 #366.9 | |
pslld $26, %xmm6 #366.9 | |
psrld $11, %xmm10 #366.9 | |
pslld $21, %xmm13 #366.9 | |
psrld $7, %xmm11 #365.14 | |
pslld $25, %xmm0 #365.14 | |
psrld $18, %xmm1 #365.14 | |
pslld $14, %xmm2 #365.14 | |
por %xmm6, %xmm5 #366.9 | |
por %xmm13, %xmm10 #366.9 | |
por %xmm0, %xmm11 #365.14 | |
por %xmm2, %xmm1 #365.14 | |
pxor %xmm10, %xmm5 #366.9 | |
movdqa %xmm12, %xmm14 #366.9 | |
movdqa %xmm12, %xmm10 #366.9 | |
pxor %xmm1, %xmm11 #365.14 | |
psrld $3, %xmm3 #365.14 | |
psrld $25, %xmm14 #366.9 | |
pslld $7, %xmm10 #366.9 | |
pxor %xmm3, %xmm11 #365.14 | |
por %xmm10, %xmm14 #366.9 | |
movdqa %xmm12, %xmm0 #366.9 | |
paddd %xmm11, %xmm15 #365.14 | |
movdqa %xmm12, 3616(%rsp) #364.9 | |
pxor %xmm14, %xmm5 #366.9 | |
movdqa 1152(%rsp), %xmm4 #365.14 | |
pand %xmm9, %xmm0 #366.9 | |
pandn %xmm8, %xmm12 #366.9 | |
paddd %xmm15, %xmm4 #365.14 | |
paddd %xmm5, %xmm7 #366.9 | |
pxor %xmm12, %xmm0 #366.9 | |
movdqa %xmm8, 3536(%rsp) #360.9 | |
movdqa %xmm9, 3568(%rsp) #361.9 | |
movdqa %xmm4, 3664(%rsp) #365.14 | |
movdqa %xmm7, 3680(%rsp) #366.9 | |
movdqa %xmm0, 3696(%rsp) #366.9 | |
# LOE | |
..B2.13: # Preds ..B2.14 | |
movdqa 3696(%rsp), %xmm2 #366.9 | |
movdqa 3680(%rsp), %xmm5 #366.9 | |
paddd .L_2il0floatpacket.6762(%rip), %xmm2 #366.9 | |
paddd %xmm2, %xmm5 #366.9 | |
movdqa 3648(%rsp), %xmm2 #366.9 | |
movdqa %xmm2, %xmm1 #366.9 | |
movdqa %xmm2, %xmm9 #366.9 | |
movdqa %xmm2, %xmm10 #366.9 | |
movdqa %xmm2, %xmm6 #366.9 | |
psrld $2, %xmm1 #366.9 | |
pslld $30, %xmm9 #366.9 | |
psrld $13, %xmm10 #366.9 | |
pslld $19, %xmm6 #366.9 | |
movdqa %xmm2, %xmm3 #366.9 | |
movdqa %xmm2, %xmm4 #366.9 | |
movdqa 3664(%rsp), %xmm0 #366.9 | |
por %xmm9, %xmm1 #366.9 | |
por %xmm6, %xmm10 #366.9 | |
psrld $22, %xmm3 #366.9 | |
pslld $10, %xmm4 #366.9 | |
movdqa %xmm0, %xmm8 #366.9 | |
movdqa 3584(%rsp), %xmm12 #366.9 | |
movdqa %xmm2, %xmm11 #366.9 | |
pxor %xmm10, %xmm1 #366.9 | |
por %xmm4, %xmm3 #366.9 | |
movdqa 3600(%rsp), %xmm4 #367.14 | |
paddd %xmm5, %xmm8 #366.9 | |
movdqa 3552(%rsp), %xmm9 #366.9 | |
pand %xmm12, %xmm11 #366.9 | |
pxor %xmm3, %xmm1 #366.9 | |
movdqa %xmm2, %xmm15 #366.9 | |
movdqa %xmm4, %xmm5 #367.14 | |
movdqa %xmm4, %xmm10 #367.14 | |
movdqa %xmm4, %xmm6 #367.14 | |
movdqa %xmm4, %xmm3 #367.14 | |
pand %xmm9, %xmm15 #366.9 | |
movdqa %xmm11, %xmm14 #366.9 | |
psrld $17, %xmm5 #367.14 | |
pslld $15, %xmm10 #367.14 | |
psrld $19, %xmm6 #367.14 | |
pslld $13, %xmm3 #367.14 | |
movdqa 3632(%rsp), %xmm13 #366.9 | |
pxor %xmm15, %xmm14 #366.9 | |
por %xmm10, %xmm5 #367.14 | |
por %xmm3, %xmm6 #367.14 | |
pxor %xmm14, %xmm13 #366.9 | |
pxor %xmm6, %xmm5 #367.14 | |
psrld $10, %xmm4 #367.14 | |
paddd %xmm13, %xmm1 #366.9 | |
movdqa 3520(%rsp), %xmm7 #366.9 | |
pxor %xmm4, %xmm5 #367.14 | |
movdqa 880(%rsp), %xmm14 #367.14 | |
paddd %xmm8, %xmm7 #366.9 | |
movdqa 3248(%rsp), %xmm3 #367.14 | |
paddd %xmm1, %xmm8 #366.9 | |
paddd %xmm5, %xmm14 #367.14 | |
movdqa %xmm3, %xmm15 #367.14 | |
movdqa %xmm3, %xmm5 #367.14 | |
movdqa %xmm3, %xmm1 #367.14 | |
movdqa %xmm3, %xmm10 #367.14 | |
psrld $7, %xmm15 #367.14 | |
pslld $25, %xmm5 #367.14 | |
psrld $18, %xmm1 #367.14 | |
pslld $14, %xmm10 #367.14 | |
por %xmm5, %xmm15 #367.14 | |
por %xmm10, %xmm1 #367.14 | |
movdqa %xmm3, %xmm6 #367.14 | |
pxor %xmm1, %xmm15 #367.14 | |
psrld $3, %xmm6 #367.14 | |
movdqa 3232(%rsp), %xmm4 #367.14 | |
pxor %xmm6, %xmm15 #367.14 | |
paddd %xmm15, %xmm4 #367.14 | |
movdqa %xmm7, %xmm13 #368.9 | |
movdqa %xmm7, %xmm5 #368.9 | |
movdqa %xmm7, %xmm1 #368.9 | |
movdqa %xmm7, %xmm10 #368.9 | |
paddd %xmm4, %xmm14 #367.14 | |
psrld $6, %xmm13 #368.9 | |
pslld $26, %xmm5 #368.9 | |
psrld $11, %xmm1 #368.9 | |
pslld $21, %xmm10 #368.9 | |
movdqa %xmm7, %xmm6 #368.9 | |
movdqa %xmm7, %xmm4 #368.9 | |
por %xmm5, %xmm13 #368.9 | |
por %xmm10, %xmm1 #368.9 | |
psrld $25, %xmm6 #368.9 | |
pslld $7, %xmm4 #368.9 | |
pxor %xmm1, %xmm13 #368.9 | |
por %xmm4, %xmm6 #368.9 | |
movdqa 3616(%rsp), %xmm1 #368.9 | |
pxor %xmm6, %xmm13 #368.9 | |
movdqa 3568(%rsp), %xmm6 #368.9 | |
movdqa %xmm1, %xmm15 #368.9 | |
movdqa %xmm7, %xmm5 #368.9 | |
pand %xmm7, %xmm15 #368.9 | |
pandn %xmm6, %xmm5 #368.9 | |
movdqa .L_2il0floatpacket.6763(%rip), %xmm4 #368.9 | |
pxor %xmm5, %xmm15 #368.9 | |
movdqa 3536(%rsp), %xmm10 #368.9 | |
paddd %xmm15, %xmm4 #368.9 | |
movdqa %xmm8, %xmm5 #368.9 | |
movdqa %xmm8, %xmm15 #368.9 | |
paddd %xmm13, %xmm10 #368.9 | |
psrld $2, %xmm5 #368.9 | |
pslld $30, %xmm15 #368.9 | |
paddd %xmm4, %xmm10 #368.9 | |
por %xmm15, %xmm5 #368.9 | |
movdqa %xmm8, %xmm15 #368.9 | |
movdqa %xmm8, %xmm4 #368.9 | |
psrld $13, %xmm15 #368.9 | |
pslld $19, %xmm4 #368.9 | |
movdqa %xmm2, %xmm13 #368.9 | |
por %xmm4, %xmm15 #368.9 | |
movdqa %xmm8, %xmm4 #368.9 | |
pxor %xmm15, %xmm5 #368.9 | |
movdqa %xmm8, %xmm15 #368.9 | |
psrld $22, %xmm4 #368.9 | |
pslld $10, %xmm15 #368.9 | |
pand %xmm8, %xmm13 #368.9 | |
por %xmm15, %xmm4 #368.9 | |
movdqa %xmm12, %xmm15 #368.9 | |
pxor %xmm4, %xmm5 #368.9 | |
pand %xmm8, %xmm15 #368.9 | |
movdqa %xmm13, %xmm4 #368.9 | |
pxor %xmm15, %xmm4 #368.9 | |
paddd %xmm14, %xmm10 #368.9 | |
pxor %xmm11, %xmm4 #368.9 | |
paddd %xmm10, %xmm9 #368.9 | |
paddd %xmm4, %xmm5 #368.9 | |
movdqa %xmm0, %xmm11 #369.14 | |
paddd %xmm5, %xmm10 #368.9 | |
movdqa %xmm0, %xmm5 #369.14 | |
movdqa %xmm0, %xmm4 #369.14 | |
movdqa %xmm0, %xmm15 #369.14 | |
psrld $17, %xmm5 #369.14 | |
pslld $15, %xmm11 #369.14 | |
psrld $19, %xmm4 #369.14 | |
pslld $13, %xmm15 #369.14 | |
por %xmm11, %xmm5 #369.14 | |
por %xmm15, %xmm4 #369.14 | |
pxor %xmm4, %xmm5 #369.14 | |
psrld $10, %xmm0 #369.14 | |
movdqa 896(%rsp), %xmm11 #369.14 | |
pxor %xmm0, %xmm5 #369.14 | |
movdqa 3264(%rsp), %xmm15 #369.14 | |
paddd %xmm5, %xmm11 #369.14 | |
movdqa %xmm15, %xmm5 #369.14 | |
movdqa %xmm15, %xmm4 #369.14 | |
psrld $7, %xmm5 #369.14 | |
pslld $25, %xmm4 #369.14 | |
por %xmm4, %xmm5 #369.14 | |
movdqa %xmm15, %xmm0 #369.14 | |
movdqa %xmm15, %xmm4 #369.14 | |
psrld $18, %xmm0 #369.14 | |
pslld $14, %xmm4 #369.14 | |
por %xmm4, %xmm0 #369.14 | |
movdqa %xmm9, %xmm4 #370.9 | |
pxor %xmm0, %xmm5 #369.14 | |
movdqa %xmm15, %xmm0 #369.14 | |
psrld $3, %xmm0 #369.14 | |
psrld $11, %xmm4 #370.9 | |
pxor %xmm0, %xmm5 #369.14 | |
movdqa %xmm9, %xmm0 #370.9 | |
paddd %xmm5, %xmm3 #369.14 | |
movdqa %xmm9, %xmm5 #370.9 | |
psrld $6, %xmm5 #370.9 | |
pslld $26, %xmm0 #370.9 | |
por %xmm0, %xmm5 #370.9 | |
movdqa %xmm9, %xmm0 #370.9 | |
pslld $21, %xmm0 #370.9 | |
paddd %xmm3, %xmm11 #369.14 | |
por %xmm0, %xmm4 #370.9 | |
movdqa %xmm9, %xmm3 #370.9 | |
movdqa %xmm9, %xmm0 #370.9 | |
psrld $25, %xmm3 #370.9 | |
pslld $7, %xmm0 #370.9 | |
pxor %xmm4, %xmm5 #370.9 | |
por %xmm0, %xmm3 #370.9 | |
movdqa %xmm9, %xmm0 #370.9 | |
pxor %xmm3, %xmm5 #370.9 | |
movdqa %xmm9, %xmm3 #370.9 | |
pand %xmm7, %xmm0 #370.9 | |
pandn %xmm1, %xmm3 #370.9 | |
movdqa .L_2il0floatpacket.6764(%rip), %xmm4 #370.9 | |
pxor %xmm3, %xmm0 #370.9 | |
paddd %xmm0, %xmm4 #370.9 | |
movdqa %xmm10, %xmm3 #370.9 | |
movdqa %xmm10, %xmm0 #370.9 | |
paddd %xmm5, %xmm6 #370.9 | |
psrld $2, %xmm3 #370.9 | |
pslld $30, %xmm0 #370.9 | |
paddd %xmm4, %xmm6 #370.9 | |
por %xmm0, %xmm3 #370.9 | |
movdqa %xmm10, %xmm0 #370.9 | |
movdqa %xmm10, %xmm4 #370.9 | |
psrld $13, %xmm0 #370.9 | |
pslld $19, %xmm4 #370.9 | |
por %xmm4, %xmm0 #370.9 | |
movdqa %xmm10, %xmm4 #370.9 | |
pxor %xmm0, %xmm3 #370.9 | |
movdqa %xmm10, %xmm0 #370.9 | |
movdqa %xmm10, %xmm5 #370.9 | |
psrld $22, %xmm0 #370.9 | |
pslld $10, %xmm4 #370.9 | |
pand %xmm8, %xmm5 #370.9 | |
por %xmm4, %xmm0 #370.9 | |
movdqa %xmm2, %xmm4 #370.9 | |
pxor %xmm0, %xmm3 #370.9 | |
pand %xmm10, %xmm4 #370.9 | |
movdqa %xmm5, %xmm0 #370.9 | |
paddd %xmm11, %xmm6 #370.9 | |
pxor %xmm4, %xmm0 #370.9 | |
paddd %xmm6, %xmm12 #370.9 | |
pxor %xmm13, %xmm0 #370.9 | |
movdqa %xmm14, %xmm13 #371.14 | |
paddd %xmm0, %xmm3 #370.9 | |
movdqa %xmm14, %xmm0 #371.14 | |
paddd %xmm3, %xmm6 #370.9 | |
movdqa %xmm14, %xmm3 #371.14 | |
movdqa %xmm14, %xmm4 #371.14 | |
psrld $17, %xmm0 #371.14 | |
pslld $15, %xmm13 #371.14 | |
psrld $19, %xmm3 #371.14 | |
pslld $13, %xmm4 #371.14 | |
por %xmm13, %xmm0 #371.14 | |
por %xmm4, %xmm3 #371.14 | |
movdqa %xmm14, 3712(%rsp) #367.14 | |
pxor %xmm3, %xmm0 #371.14 | |
movdqa 3280(%rsp), %xmm4 #371.14 | |
psrld $10, %xmm14 #371.14 | |
pxor %xmm14, %xmm0 #371.14 | |
movdqa %xmm4, %xmm14 #371.14 | |
movdqa %xmm4, %xmm13 #371.14 | |
psrld $7, %xmm14 #371.14 | |
movdqa 912(%rsp), %xmm3 #371.14 | |
pslld $25, %xmm13 #371.14 | |
paddd %xmm0, %xmm3 #371.14 | |
por %xmm13, %xmm14 #371.14 | |
movdqa %xmm4, %xmm0 #371.14 | |
movdqa %xmm4, %xmm13 #371.14 | |
psrld $18, %xmm0 #371.14 | |
pslld $14, %xmm13 #371.14 | |
por %xmm13, %xmm0 #371.14 | |
movdqa %xmm12, %xmm13 #372.9 | |
pxor %xmm0, %xmm14 #371.14 | |
movdqa %xmm4, %xmm0 #371.14 | |
psrld $3, %xmm0 #371.14 | |
psrld $11, %xmm13 #372.9 | |
pxor %xmm0, %xmm14 #371.14 | |
movdqa %xmm12, %xmm0 #372.9 | |
paddd %xmm14, %xmm15 #371.14 | |
movdqa %xmm12, %xmm14 #372.9 | |
paddd %xmm15, %xmm3 #371.14 | |
movdqa %xmm12, %xmm15 #372.9 | |
psrld $6, %xmm0 #372.9 | |
pslld $26, %xmm14 #372.9 | |
pslld $21, %xmm15 #372.9 | |
por %xmm14, %xmm0 #372.9 | |
por %xmm15, %xmm13 #372.9 | |
movdqa %xmm12, %xmm14 #372.9 | |
movdqa %xmm12, %xmm15 #372.9 | |
psrld $25, %xmm14 #372.9 | |
pslld $7, %xmm15 #372.9 | |
pxor %xmm13, %xmm0 #372.9 | |
por %xmm15, %xmm14 #372.9 | |
movdqa %xmm12, %xmm15 #372.9 | |
pxor %xmm14, %xmm0 #372.9 | |
movdqa %xmm12, %xmm14 #372.9 | |
pand %xmm9, %xmm15 #372.9 | |
pandn %xmm7, %xmm14 #372.9 | |
movdqa .L_2il0floatpacket.6765(%rip), %xmm13 #372.9 | |
pxor %xmm14, %xmm15 #372.9 | |
paddd %xmm0, %xmm1 #372.9 | |
paddd %xmm15, %xmm13 #372.9 | |
paddd %xmm13, %xmm1 #372.9 | |
movdqa %xmm6, %xmm13 #372.9 | |
movdqa %xmm6, %xmm14 #372.9 | |
psrld $2, %xmm13 #372.9 | |
pslld $30, %xmm14 #372.9 | |
movdqa %xmm6, %xmm15 #372.9 | |
por %xmm14, %xmm13 #372.9 | |
movdqa %xmm6, %xmm14 #372.9 | |
psrld $13, %xmm14 #372.9 | |
pslld $19, %xmm15 #372.9 | |
por %xmm15, %xmm14 #372.9 | |
movdqa %xmm6, %xmm15 #372.9 | |
pxor %xmm14, %xmm13 #372.9 | |
movdqa %xmm6, %xmm14 #372.9 | |
movdqa %xmm6, %xmm0 #372.9 | |
psrld $22, %xmm15 #372.9 | |
pslld $10, %xmm14 #372.9 | |
pand %xmm10, %xmm0 #372.9 | |
por %xmm14, %xmm15 #372.9 | |
movdqa %xmm6, %xmm14 #372.9 | |
pxor %xmm15, %xmm13 #372.9 | |
pand %xmm8, %xmm14 #372.9 | |
movdqa %xmm0, %xmm15 #372.9 | |
paddd %xmm3, %xmm1 #372.9 | |
pxor %xmm14, %xmm15 #372.9 | |
paddd %xmm1, %xmm2 #372.9 | |
pxor %xmm5, %xmm15 #372.9 | |
movdqa %xmm11, %xmm5 #373.14 | |
paddd %xmm15, %xmm13 #372.9 | |
movdqa %xmm11, %xmm15 #373.14 | |
paddd %xmm13, %xmm1 #372.9 | |
movdqa %xmm11, %xmm14 #373.14 | |
movdqa %xmm11, %xmm13 #373.14 | |
psrld $17, %xmm15 #373.14 | |
pslld $15, %xmm5 #373.14 | |
psrld $19, %xmm14 #373.14 | |
pslld $13, %xmm13 #373.14 | |
por %xmm5, %xmm15 #373.14 | |
por %xmm13, %xmm14 #373.14 | |
movdqa %xmm11, 3728(%rsp) #369.14 | |
pxor %xmm14, %xmm15 #373.14 | |
psrld $10, %xmm11 #373.14 | |
movdqa 928(%rsp), %xmm5 #373.14 | |
pxor %xmm11, %xmm15 #373.14 | |
movdqa 3296(%rsp), %xmm11 #373.14 | |
paddd %xmm15, %xmm5 #373.14 | |
movdqa %xmm11, %xmm15 #373.14 | |
movdqa %xmm11, %xmm13 #373.14 | |
psrld $7, %xmm15 #373.14 | |
pslld $25, %xmm13 #373.14 | |
por %xmm13, %xmm15 #373.14 | |
movdqa %xmm11, %xmm13 #373.14 | |
movdqa %xmm11, %xmm14 #373.14 | |
psrld $18, %xmm13 #373.14 | |
pslld $14, %xmm14 #373.14 | |
por %xmm14, %xmm13 #373.14 | |
movdqa %xmm11, %xmm14 #373.14 | |
pxor %xmm13, %xmm15 #373.14 | |
psrld $3, %xmm14 #373.14 | |
pxor %xmm14, %xmm15 #373.14 | |
movdqa %xmm2, %xmm13 #374.9 | |
paddd %xmm15, %xmm4 #373.14 | |
movdqa %xmm2, %xmm15 #374.9 | |
paddd %xmm4, %xmm5 #373.14 | |
movdqa %xmm2, %xmm4 #374.9 | |
movdqa %xmm2, %xmm14 #374.9 | |
psrld $6, %xmm4 #374.9 | |
pslld $26, %xmm13 #374.9 | |
psrld $11, %xmm15 #374.9 | |
pslld $21, %xmm14 #374.9 | |
por %xmm13, %xmm4 #374.9 | |
por %xmm14, %xmm15 #374.9 | |
movdqa %xmm2, %xmm13 #374.9 | |
pxor %xmm15, %xmm4 #374.9 | |
movdqa %xmm2, %xmm15 #374.9 | |
psrld $25, %xmm13 #374.9 | |
pslld $7, %xmm15 #374.9 | |
por %xmm15, %xmm13 #374.9 | |
movdqa %xmm2, %xmm15 #374.9 | |
movdqa %xmm2, %xmm14 #374.9 | |
pand %xmm12, %xmm15 #374.9 | |
pandn %xmm9, %xmm14 #374.9 | |
pxor %xmm13, %xmm4 #374.9 | |
movdqa .L_2il0floatpacket.6766(%rip), %xmm13 #374.9 | |
pxor %xmm14, %xmm15 #374.9 | |
paddd %xmm15, %xmm13 #374.9 | |
movdqa %xmm1, %xmm15 #374.9 | |
movdqa %xmm1, %xmm14 #374.9 | |
paddd %xmm4, %xmm7 #374.9 | |
psrld $2, %xmm15 #374.9 | |
pslld $30, %xmm14 #374.9 | |
paddd %xmm13, %xmm7 #374.9 | |
por %xmm14, %xmm15 #374.9 | |
movdqa %xmm1, %xmm14 #374.9 | |
movdqa %xmm1, %xmm13 #374.9 | |
psrld $13, %xmm14 #374.9 | |
pslld $19, %xmm13 #374.9 | |
por %xmm13, %xmm14 #374.9 | |
movdqa %xmm1, %xmm13 #374.9 | |
pxor %xmm14, %xmm15 #374.9 | |
movdqa %xmm1, %xmm14 #374.9 | |
movdqa %xmm1, %xmm4 #374.9 | |
psrld $22, %xmm13 #374.9 | |
pslld $10, %xmm14 #374.9 | |
pand %xmm6, %xmm4 #374.9 | |
por %xmm14, %xmm13 #374.9 | |
movdqa %xmm1, %xmm14 #374.9 | |
pxor %xmm13, %xmm15 #374.9 | |
pand %xmm10, %xmm14 #374.9 | |
movdqa %xmm4, %xmm13 #374.9 | |
paddd %xmm5, %xmm7 #374.9 | |
pxor %xmm14, %xmm13 #374.9 | |
paddd %xmm7, %xmm8 #374.9 | |
pxor %xmm0, %xmm13 #374.9 | |
movdqa %xmm3, %xmm0 #375.14 | |
paddd %xmm13, %xmm15 #374.9 | |
movdqa %xmm3, %xmm14 #375.14 | |
paddd %xmm15, %xmm7 #374.9 | |
movdqa %xmm3, %xmm15 #375.14 | |
movdqa %xmm3, %xmm13 #375.14 | |
psrld $17, %xmm15 #375.14 | |
pslld $15, %xmm0 #375.14 | |
psrld $19, %xmm14 #375.14 | |
pslld $13, %xmm13 #375.14 | |
por %xmm0, %xmm15 #375.14 | |
por %xmm13, %xmm14 #375.14 | |
movdqa %xmm3, 3744(%rsp) #371.14 | |
pxor %xmm14, %xmm15 #375.14 | |
psrld $10, %xmm3 #375.14 | |
movdqa 944(%rsp), %xmm0 #375.14 | |
pxor %xmm3, %xmm15 #375.14 | |
paddd %xmm15, %xmm0 #375.14 | |
movdqa 3312(%rsp), %xmm15 #375.14 | |
movdqa %xmm15, %xmm3 #375.14 | |
movdqa %xmm15, %xmm13 #375.14 | |
psrld $7, %xmm3 #375.14 | |
pslld $25, %xmm13 #375.14 | |
por %xmm13, %xmm3 #375.14 | |
movdqa %xmm15, %xmm14 #375.14 | |
movdqa %xmm15, %xmm13 #375.14 | |
psrld $18, %xmm14 #375.14 | |
pslld $14, %xmm13 #375.14 | |
por %xmm13, %xmm14 #375.14 | |
movdqa %xmm15, %xmm13 #375.14 | |
pxor %xmm14, %xmm3 #375.14 | |
psrld $3, %xmm13 #375.14 | |
pxor %xmm13, %xmm3 #375.14 | |
movdqa %xmm8, %xmm14 #376.9 | |
paddd %xmm3, %xmm11 #375.14 | |
movdqa %xmm8, %xmm3 #376.9 | |
psrld $6, %xmm3 #376.9 | |
pslld $26, %xmm14 #376.9 | |
por %xmm14, %xmm3 #376.9 | |
movdqa %xmm8, %xmm13 #376.9 | |
movdqa %xmm8, %xmm14 #376.9 | |
psrld $11, %xmm13 #376.9 | |
pslld $21, %xmm14 #376.9 | |
paddd %xmm11, %xmm0 #375.14 | |
por %xmm14, %xmm13 #376.9 | |
movdqa %xmm8, %xmm11 #376.9 | |
movdqa %xmm8, %xmm14 #376.9 | |
psrld $25, %xmm11 #376.9 | |
pslld $7, %xmm14 #376.9 | |
pxor %xmm13, %xmm3 #376.9 | |
por %xmm14, %xmm11 #376.9 | |
movdqa %xmm8, %xmm14 #376.9 | |
movdqa %xmm8, %xmm13 #376.9 | |
pand %xmm2, %xmm14 #376.9 | |
pandn %xmm12, %xmm13 #376.9 | |
pxor %xmm11, %xmm3 #376.9 | |
movdqa .L_2il0floatpacket.6767(%rip), %xmm11 #376.9 | |
pxor %xmm13, %xmm14 #376.9 | |
paddd %xmm14, %xmm11 #376.9 | |
movdqa %xmm7, %xmm13 #376.9 | |
movdqa %xmm7, %xmm14 #376.9 | |
paddd %xmm3, %xmm9 #376.9 | |
psrld $2, %xmm13 #376.9 | |
pslld $30, %xmm14 #376.9 | |
paddd %xmm11, %xmm9 #376.9 | |
por %xmm14, %xmm13 #376.9 | |
movdqa %xmm7, %xmm11 #376.9 | |
movdqa %xmm7, %xmm14 #376.9 | |
psrld $13, %xmm11 #376.9 | |
pslld $19, %xmm14 #376.9 | |
por %xmm14, %xmm11 #376.9 | |
movdqa %xmm7, %xmm14 #376.9 | |
pxor %xmm11, %xmm13 #376.9 | |
movdqa %xmm7, %xmm11 #376.9 | |
movdqa %xmm7, %xmm3 #376.9 | |
psrld $22, %xmm14 #376.9 | |
pslld $10, %xmm11 #376.9 | |
pand %xmm1, %xmm3 #376.9 | |
por %xmm11, %xmm14 #376.9 | |
movdqa %xmm7, %xmm11 #376.9 | |
pxor %xmm14, %xmm13 #376.9 | |
pand %xmm6, %xmm11 #376.9 | |
movdqa %xmm3, %xmm14 #376.9 | |
paddd %xmm0, %xmm9 #376.9 | |
pxor %xmm11, %xmm14 #376.9 | |
paddd %xmm9, %xmm10 #376.9 | |
pxor %xmm4, %xmm14 #376.9 | |
movdqa %xmm5, %xmm4 #377.14 | |
paddd %xmm14, %xmm13 #376.9 | |
movdqa %xmm5, %xmm14 #377.14 | |
paddd %xmm13, %xmm9 #376.9 | |
movdqa %xmm5, %xmm11 #377.14 | |
movdqa %xmm5, %xmm13 #377.14 | |
psrld $17, %xmm14 #377.14 | |
pslld $15, %xmm4 #377.14 | |
psrld $19, %xmm11 #377.14 | |
pslld $13, %xmm13 #377.14 | |
por %xmm4, %xmm14 #377.14 | |
por %xmm13, %xmm11 #377.14 | |
movdqa %xmm10, %xmm13 #378.9 | |
movdqa %xmm5, 3760(%rsp) #373.14 | |
pxor %xmm11, %xmm14 #377.14 | |
psrld $10, %xmm5 #377.14 | |
psrld $6, %xmm13 #378.9 | |
movdqa 3600(%rsp), %xmm4 #377.14 | |
pxor %xmm5, %xmm14 #377.14 | |
movdqa 1168(%rsp), %xmm11 #377.14 | |
paddd %xmm14, %xmm4 #377.14 | |
paddd %xmm15, %xmm11 #377.14 | |
movdqa %xmm10, %xmm15 #378.9 | |
paddd %xmm11, %xmm4 #377.14 | |
movdqa %xmm10, %xmm5 #378.9 | |
movdqa %xmm10, %xmm11 #378.9 | |
pslld $26, %xmm15 #378.9 | |
psrld $11, %xmm5 #378.9 | |
pslld $21, %xmm11 #378.9 | |
por %xmm15, %xmm13 #378.9 | |
por %xmm11, %xmm5 #378.9 | |
pxor %xmm5, %xmm13 #378.9 | |
movdqa %xmm10, %xmm14 #378.9 | |
movdqa %xmm10, %xmm5 #378.9 | |
psrld $25, %xmm14 #378.9 | |
pslld $7, %xmm5 #378.9 | |
movdqa %xmm10, %xmm11 #378.9 | |
por %xmm5, %xmm14 #378.9 | |
movdqa %xmm10, %xmm5 #378.9 | |
pand %xmm8, %xmm5 #378.9 | |
pandn %xmm2, %xmm11 #378.9 | |
pxor %xmm14, %xmm13 #378.9 | |
pxor %xmm11, %xmm5 #378.9 | |
movdqa .L_2il0floatpacket.6768(%rip), %xmm11 #378.9 | |
paddd %xmm13, %xmm12 #378.9 | |
paddd %xmm5, %xmm11 #378.9 | |
movdqa %xmm9, %xmm13 #378.9 | |
paddd %xmm11, %xmm12 #378.9 | |
movdqa %xmm9, %xmm11 #378.9 | |
movdqa %xmm4, 3856(%rsp) #377.14 | |
paddd %xmm4, %xmm12 #378.9 | |
movdqa %xmm9, %xmm4 #378.9 | |
movdqa %xmm9, %xmm14 #378.9 | |
psrld $2, %xmm4 #378.9 | |
pslld $30, %xmm11 #378.9 | |
psrld $13, %xmm13 #378.9 | |
pslld $19, %xmm14 #378.9 | |
movdqa %xmm9, %xmm5 #378.9 | |
por %xmm11, %xmm4 #378.9 | |
por %xmm14, %xmm13 #378.9 | |
pand %xmm7, %xmm5 #378.9 | |
pxor %xmm13, %xmm4 #378.9 | |
movdqa %xmm9, %xmm15 #378.9 | |
movdqa %xmm9, %xmm13 #378.9 | |
movdqa %xmm9, %xmm11 #378.9 | |
psrld $22, %xmm15 #378.9 | |
pslld $10, %xmm13 #378.9 | |
pand %xmm1, %xmm11 #378.9 | |
movdqa %xmm5, %xmm14 #378.9 | |
por %xmm13, %xmm15 #378.9 | |
pxor %xmm11, %xmm14 #378.9 | |
pxor %xmm15, %xmm4 #378.9 | |
pxor %xmm3, %xmm14 #378.9 | |
paddd %xmm14, %xmm4 #378.9 | |
paddd %xmm12, %xmm6 #378.9 | |
paddd %xmm4, %xmm12 #378.9 | |
movdqa %xmm0, %xmm3 #379.14 | |
movdqa %xmm0, %xmm4 #379.14 | |
movdqa %xmm0, %xmm11 #379.14 | |
movdqa %xmm0, %xmm13 #379.14 | |
psrld $17, %xmm3 #379.14 | |
pslld $15, %xmm4 #379.14 | |
psrld $19, %xmm11 #379.14 | |
pslld $13, %xmm13 #379.14 | |
por %xmm4, %xmm3 #379.14 | |
por %xmm13, %xmm11 #379.14 | |
movdqa %xmm6, %xmm15 #380.9 | |
movdqa %xmm0, 3808(%rsp) #375.14 | |
pxor %xmm11, %xmm3 #379.14 | |
psrld $10, %xmm0 #379.14 | |
movdqa %xmm6, %xmm4 #380.9 | |
movdqa 3664(%rsp), %xmm14 #379.14 | |
pxor %xmm0, %xmm3 #379.14 | |
movdqa 1184(%rsp), %xmm0 #379.14 | |
paddd %xmm3, %xmm14 #379.14 | |
movdqa %xmm6, %xmm3 #380.9 | |
movdqa %xmm6, %xmm11 #380.9 | |
paddd %xmm14, %xmm0 #379.14 | |
psrld $6, %xmm15 #380.9 | |
pslld $26, %xmm3 #380.9 | |
psrld $11, %xmm4 #380.9 | |
pslld $21, %xmm11 #380.9 | |
movdqa %xmm6, %xmm13 #380.9 | |
movdqa %xmm6, %xmm14 #380.9 | |
por %xmm3, %xmm15 #380.9 | |
por %xmm11, %xmm4 #380.9 | |
psrld $25, %xmm13 #380.9 | |
pslld $7, %xmm14 #380.9 | |
movdqa %xmm6, %xmm3 #380.9 | |
movdqa %xmm6, 3872(%rsp) #378.9 | |
pxor %xmm4, %xmm15 #380.9 | |
por %xmm14, %xmm13 #380.9 | |
pand %xmm10, %xmm3 #380.9 | |
pandn %xmm8, %xmm6 #380.9 | |
pxor %xmm13, %xmm15 #380.9 | |
movdqa %xmm8, 3776(%rsp) #374.9 | |
pxor %xmm6, %xmm3 #380.9 | |
movdqa .L_2il0floatpacket.6769(%rip), %xmm8 #380.9 | |
paddd %xmm15, %xmm2 #380.9 | |
paddd %xmm3, %xmm8 #380.9 | |
movdqa %xmm12, %xmm3 #380.9 | |
paddd %xmm8, %xmm2 #380.9 | |
movdqa %xmm12, %xmm4 #380.9 | |
paddd %xmm0, %xmm2 #380.9 | |
movdqa %xmm12, %xmm6 #380.9 | |
paddd %xmm2, %xmm1 #380.9 | |
pslld $30, %xmm3 #380.9 | |
movdqa %xmm0, 3904(%rsp) #379.14 | |
movdqa %xmm12, %xmm0 #380.9 | |
movdqa %xmm1, 3936(%rsp) #380.9 | |
movdqa %xmm12, %xmm1 #380.9 | |
movdqa %xmm10, 3824(%rsp) #376.9 | |
pand %xmm9, %xmm1 #380.9 | |
movdqa %xmm9, 3840(%rsp) #376.9 | |
psrld $2, %xmm0 #380.9 | |
psrld $13, %xmm4 #380.9 | |
pslld $19, %xmm6 #380.9 | |
movdqa %xmm12, %xmm9 #380.9 | |
movdqa %xmm12, %xmm10 #380.9 | |
movdqa %xmm12, 3888(%rsp) #378.9 | |
por %xmm3, %xmm0 #380.9 | |
por %xmm6, %xmm4 #380.9 | |
psrld $22, %xmm9 #380.9 | |
pslld $10, %xmm10 #380.9 | |
pand %xmm7, %xmm12 #380.9 | |
movdqa %xmm1, 3952(%rsp) #380.9 | |
pxor %xmm4, %xmm0 #380.9 | |
por %xmm10, %xmm9 #380.9 | |
pxor %xmm12, %xmm1 #380.9 | |
pxor %xmm9, %xmm0 #380.9 | |
pxor %xmm5, %xmm1 #380.9 | |
paddd %xmm1, %xmm0 #380.9 | |
movdqa %xmm7, 3792(%rsp) #374.9 | |
movdqa %xmm2, 3920(%rsp) #380.9 | |
movdqa %xmm0, 3968(%rsp) #380.9 | |
# LOE | |
..B2.12: # Preds ..B2.13 | |
movdqa 3856(%rsp), %xmm7 #381.14 | |
movdqa %xmm7, %xmm9 #381.14 | |
movdqa %xmm7, %xmm1 #381.14 | |
psrld $19, %xmm9 #381.14 | |
pslld $13, %xmm1 #381.14 | |
por %xmm1, %xmm9 #381.14 | |
movdqa %xmm7, %xmm14 #381.14 | |
movdqa 3936(%rsp), %xmm1 #382.9 | |
movdqa %xmm7, %xmm0 #381.14 | |
movdqa %xmm1, %xmm6 #382.9 | |
movdqa %xmm1, %xmm4 #382.9 | |
movdqa %xmm1, %xmm5 #382.9 | |
movdqa %xmm1, %xmm3 #382.9 | |
psrld $17, %xmm14 #381.14 | |
pslld $15, %xmm0 #381.14 | |
psrld $6, %xmm6 #382.9 | |
pslld $26, %xmm4 #382.9 | |
psrld $11, %xmm5 #382.9 | |
pslld $21, %xmm3 #382.9 | |
movdqa %xmm1, %xmm13 #382.9 | |
movdqa %xmm1, %xmm12 #382.9 | |
por %xmm0, %xmm14 #381.14 | |
movdqa %xmm7, %xmm11 #381.14 | |
por %xmm4, %xmm6 #382.9 | |
por %xmm3, %xmm5 #382.9 | |
psrld $25, %xmm13 #382.9 | |
pslld $7, %xmm12 #382.9 | |
pxor %xmm9, %xmm14 #381.14 | |
psrld $10, %xmm11 #381.14 | |
pxor %xmm5, %xmm6 #382.9 | |
por %xmm12, %xmm13 #382.9 | |
movdqa 3712(%rsp), %xmm8 #381.14 | |
pxor %xmm11, %xmm14 #381.14 | |
movdqa 3776(%rsp), %xmm10 #382.9 | |
pxor %xmm13, %xmm6 #382.9 | |
movdqa 3920(%rsp), %xmm2 #380.9 | |
paddd %xmm14, %xmm8 #381.14 | |
movdqa 3872(%rsp), %xmm4 #382.9 | |
paddd %xmm6, %xmm10 #382.9 | |
movdqa 3824(%rsp), %xmm6 #382.9 | |
movdqa %xmm1, %xmm14 #382.9 | |
movdqa %xmm1, %xmm0 #382.9 | |
pand %xmm4, %xmm14 #382.9 | |
paddd 3968(%rsp), %xmm2 #380.9 | |
pandn %xmm6, %xmm0 #382.9 | |
movdqa 1200(%rsp), %xmm15 #381.14 | |
pxor %xmm0, %xmm14 #382.9 | |
movdqa .L_2il0floatpacket.6770(%rip), %xmm9 #382.9 | |
paddd %xmm8, %xmm15 #381.14 | |
movdqa %xmm2, %xmm5 #382.9 | |
movdqa %xmm2, %xmm8 #382.9 | |
paddd %xmm14, %xmm9 #382.9 | |
psrld $2, %xmm5 #382.9 | |
pslld $30, %xmm8 #382.9 | |
movdqa %xmm2, %xmm3 #382.9 | |
movdqa %xmm2, %xmm14 #382.9 | |
por %xmm8, %xmm5 #382.9 | |
psrld $13, %xmm3 #382.9 | |
pslld $19, %xmm14 #382.9 | |
movdqa %xmm2, %xmm8 #382.9 | |
movdqa %xmm2, %xmm13 #382.9 | |
por %xmm14, %xmm3 #382.9 | |
psrld $22, %xmm8 #382.9 | |
pslld $10, %xmm13 #382.9 | |
pxor %xmm3, %xmm5 #382.9 | |
movdqa 3888(%rsp), %xmm11 #382.9 | |
por %xmm13, %xmm8 #382.9 | |
movdqa %xmm11, %xmm0 #382.9 | |
pxor %xmm8, %xmm5 #382.9 | |
movdqa 3840(%rsp), %xmm8 #382.9 | |
pand %xmm2, %xmm0 #382.9 | |
movdqa %xmm8, %xmm12 #382.9 | |
movdqa %xmm0, %xmm14 #382.9 | |
pand %xmm2, %xmm12 #382.9 | |
paddd %xmm9, %xmm10 #382.9 | |
movdqa 3952(%rsp), %xmm3 #382.9 | |
pxor %xmm12, %xmm14 #382.9 | |
pxor %xmm14, %xmm3 #382.9 | |
paddd %xmm15, %xmm10 #382.9 | |
movdqa 3792(%rsp), %xmm9 #382.9 | |
paddd %xmm3, %xmm5 #382.9 | |
movdqa 3904(%rsp), %xmm14 #383.15 | |
paddd %xmm10, %xmm9 #382.9 | |
paddd %xmm5, %xmm10 #382.9 | |
movdqa %xmm14, %xmm13 #383.15 | |
movdqa %xmm14, %xmm12 #383.15 | |
movdqa %xmm14, %xmm5 #383.15 | |
movdqa %xmm14, %xmm3 #383.15 | |
psrld $17, %xmm13 #383.15 | |
pslld $15, %xmm12 #383.15 | |
psrld $19, %xmm5 #383.15 | |
pslld $13, %xmm3 #383.15 | |
por %xmm12, %xmm13 #383.15 | |
por %xmm3, %xmm5 #383.15 | |
psrld $10, %xmm14 #383.15 | |
pxor %xmm5, %xmm13 #383.15 | |
movdqa %xmm9, %xmm12 #384.9 | |
movdqa 3728(%rsp), %xmm5 #383.15 | |
pxor %xmm14, %xmm13 #383.15 | |
paddd %xmm13, %xmm5 #383.15 | |
movdqa %xmm9, %xmm3 #384.9 | |
movdqa 1216(%rsp), %xmm13 #383.15 | |
movdqa %xmm9, %xmm14 #384.9 | |
paddd %xmm5, %xmm13 #383.15 | |
movdqa %xmm9, %xmm5 #384.9 | |
psrld $6, %xmm12 #384.9 | |
pslld $26, %xmm3 #384.9 | |
psrld $11, %xmm14 #384.9 | |
pslld $21, %xmm5 #384.9 | |
por %xmm3, %xmm12 #384.9 | |
por %xmm5, %xmm14 #384.9 | |
pxor %xmm14, %xmm12 #384.9 | |
movdqa %xmm9, %xmm3 #384.9 | |
movdqa %xmm9, %xmm14 #384.9 | |
psrld $25, %xmm3 #384.9 | |
pslld $7, %xmm14 #384.9 | |
movdqa %xmm9, %xmm5 #384.9 | |
por %xmm14, %xmm3 #384.9 | |
movdqa %xmm1, %xmm14 #384.9 | |
pand %xmm9, %xmm14 #384.9 | |
pandn %xmm4, %xmm5 #384.9 | |
pxor %xmm3, %xmm12 #384.9 | |
pxor %xmm5, %xmm14 #384.9 | |
movdqa .L_2il0floatpacket.6771(%rip), %xmm3 #384.9 | |
movdqa %xmm10, %xmm5 #384.9 | |
paddd %xmm14, %xmm3 #384.9 | |
movdqa %xmm10, %xmm14 #384.9 | |
paddd %xmm12, %xmm6 #384.9 | |
psrld $2, %xmm5 #384.9 | |
pslld $30, %xmm14 #384.9 | |
paddd %xmm3, %xmm6 #384.9 | |
por %xmm14, %xmm5 #384.9 | |
movdqa %xmm10, %xmm3 #384.9 | |
movdqa %xmm10, %xmm14 #384.9 | |
psrld $13, %xmm3 #384.9 | |
pslld $19, %xmm14 #384.9 | |
movdqa %xmm10, %xmm12 #384.9 | |
por %xmm14, %xmm3 #384.9 | |
movdqa %xmm10, %xmm14 #384.9 | |
pxor %xmm3, %xmm5 #384.9 | |
movdqa %xmm10, %xmm3 #384.9 | |
psrld $22, %xmm14 #384.9 | |
pslld $10, %xmm3 #384.9 | |
pand %xmm2, %xmm12 #384.9 | |
por %xmm3, %xmm14 #384.9 | |
movdqa %xmm11, %xmm3 #384.9 | |
pxor %xmm14, %xmm5 #384.9 | |
pand %xmm10, %xmm3 #384.9 | |
movdqa %xmm12, %xmm14 #384.9 | |
pxor %xmm3, %xmm14 #384.9 | |
movdqa %xmm15, %xmm3 #385.15 | |
pxor %xmm0, %xmm14 #384.9 | |
movdqa %xmm15, %xmm0 #385.15 | |
paddd %xmm14, %xmm5 #384.9 | |
movdqa %xmm15, %xmm14 #385.15 | |
psrld $17, %xmm0 #385.15 | |
pslld $15, %xmm14 #385.15 | |
por %xmm14, %xmm0 #385.15 | |
movdqa %xmm15, %xmm14 #385.15 | |
psrld $19, %xmm3 #385.15 | |
pslld $13, %xmm14 #385.15 | |
por %xmm14, %xmm3 #385.15 | |
paddd %xmm13, %xmm6 #384.9 | |
movdqa %xmm15, 3984(%rsp) #381.14 | |
pxor %xmm3, %xmm0 #385.15 | |
psrld $10, %xmm15 #385.15 | |
paddd %xmm6, %xmm8 #384.9 | |
movdqa 3744(%rsp), %xmm14 #385.15 | |
pxor %xmm15, %xmm0 #385.15 | |
paddd %xmm5, %xmm6 #384.9 | |
paddd %xmm0, %xmm14 #385.15 | |
movdqa 1232(%rsp), %xmm5 #385.15 | |
movdqa %xmm8, %xmm3 #386.9 | |
paddd %xmm14, %xmm5 #385.15 | |
movdqa %xmm8, %xmm14 #386.9 | |
movdqa %xmm8, %xmm0 #386.9 | |
movdqa %xmm8, %xmm15 #386.9 | |
psrld $6, %xmm14 #386.9 | |
pslld $26, %xmm3 #386.9 | |
psrld $11, %xmm0 #386.9 | |
pslld $21, %xmm15 #386.9 | |
por %xmm3, %xmm14 #386.9 | |
por %xmm15, %xmm0 #386.9 | |
pxor %xmm0, %xmm14 #386.9 | |
movdqa %xmm8, %xmm3 #386.9 | |
movdqa %xmm8, %xmm0 #386.9 | |
psrld $25, %xmm3 #386.9 | |
pslld $7, %xmm0 #386.9 | |
movdqa %xmm8, %xmm15 #386.9 | |
por %xmm0, %xmm3 #386.9 | |
movdqa %xmm8, %xmm0 #386.9 | |
pand %xmm9, %xmm0 #386.9 | |
pandn %xmm1, %xmm15 #386.9 | |
pxor %xmm3, %xmm14 #386.9 | |
pxor %xmm15, %xmm0 #386.9 | |
movdqa .L_2il0floatpacket.6772(%rip), %xmm3 #386.9 | |
movdqa %xmm6, %xmm15 #386.9 | |
paddd %xmm0, %xmm3 #386.9 | |
movdqa %xmm6, %xmm0 #386.9 | |
paddd %xmm14, %xmm4 #386.9 | |
psrld $2, %xmm15 #386.9 | |
pslld $30, %xmm0 #386.9 | |
paddd %xmm3, %xmm4 #386.9 | |
por %xmm0, %xmm15 #386.9 | |
movdqa %xmm6, %xmm0 #386.9 | |
movdqa %xmm6, %xmm3 #386.9 | |
psrld $13, %xmm0 #386.9 | |
pslld $19, %xmm3 #386.9 | |
movdqa %xmm6, %xmm14 #386.9 | |
por %xmm3, %xmm0 #386.9 | |
movdqa %xmm6, %xmm3 #386.9 | |
pxor %xmm0, %xmm15 #386.9 | |
movdqa %xmm6, %xmm0 #386.9 | |
psrld $22, %xmm0 #386.9 | |
pslld $10, %xmm3 #386.9 | |
pand %xmm10, %xmm14 #386.9 | |
por %xmm3, %xmm0 #386.9 | |
movdqa %xmm6, %xmm3 #386.9 | |
pxor %xmm0, %xmm15 #386.9 | |
pand %xmm2, %xmm3 #386.9 | |
movdqa %xmm14, %xmm0 #386.9 | |
pxor %xmm3, %xmm0 #386.9 | |
paddd %xmm5, %xmm4 #386.9 | |
pxor %xmm12, %xmm0 #386.9 | |
paddd %xmm4, %xmm11 #386.9 | |
paddd %xmm0, %xmm15 #386.9 | |
movdqa %xmm13, %xmm3 #387.15 | |
paddd %xmm15, %xmm4 #386.9 | |
movdqa %xmm13, %xmm12 #387.15 | |
movdqa %xmm13, %xmm0 #387.15 | |
movdqa %xmm13, %xmm15 #387.15 | |
psrld $17, %xmm3 #387.15 | |
pslld $15, %xmm12 #387.15 | |
psrld $19, %xmm0 #387.15 | |
pslld $13, %xmm15 #387.15 | |
por %xmm12, %xmm3 #387.15 | |
por %xmm15, %xmm0 #387.15 | |
movdqa %xmm13, 4000(%rsp) #383.15 | |
pxor %xmm0, %xmm3 #387.15 | |
psrld $10, %xmm13 #387.15 | |
movdqa %xmm11, %xmm15 #388.9 | |
movdqa 3760(%rsp), %xmm0 #387.15 | |
pxor %xmm13, %xmm3 #387.15 | |
paddd %xmm3, %xmm0 #387.15 | |
movdqa %xmm11, %xmm12 #388.9 | |
movdqa 1248(%rsp), %xmm3 #387.15 | |
movdqa %xmm11, %xmm13 #388.9 | |
paddd %xmm0, %xmm3 #387.15 | |
movdqa %xmm11, %xmm0 #388.9 | |
psrld $6, %xmm15 #388.9 | |
pslld $26, %xmm12 #388.9 | |
psrld $11, %xmm0 #388.9 | |
pslld $21, %xmm13 #388.9 | |
por %xmm12, %xmm15 #388.9 | |
por %xmm13, %xmm0 #388.9 | |
pxor %xmm0, %xmm15 #388.9 | |
movdqa %xmm11, %xmm12 #388.9 | |
movdqa %xmm11, %xmm0 #388.9 | |
psrld $25, %xmm12 #388.9 | |
pslld $7, %xmm0 #388.9 | |
movdqa %xmm11, %xmm13 #388.9 | |
por %xmm0, %xmm12 #388.9 | |
movdqa %xmm11, %xmm0 #388.9 | |
pand %xmm8, %xmm0 #388.9 | |
pandn %xmm9, %xmm13 #388.9 | |
pxor %xmm12, %xmm15 #388.9 | |
pxor %xmm13, %xmm0 #388.9 | |
movdqa .L_2il0floatpacket.6773(%rip), %xmm12 #388.9 | |
paddd %xmm15, %xmm1 #388.9 | |
paddd %xmm0, %xmm12 #388.9 | |
movdqa %xmm4, %xmm13 #388.9 | |
paddd %xmm12, %xmm1 #388.9 | |
movdqa %xmm4, %xmm12 #388.9 | |
psrld $2, %xmm12 #388.9 | |
pslld $30, %xmm13 #388.9 | |
por %xmm13, %xmm12 #388.9 | |
movdqa %xmm4, %xmm13 #388.9 | |
movdqa %xmm4, %xmm0 #388.9 | |
psrld $13, %xmm13 #388.9 | |
pslld $19, %xmm0 #388.9 | |
movdqa %xmm4, %xmm15 #388.9 | |
por %xmm0, %xmm13 #388.9 | |
movdqa %xmm4, %xmm0 #388.9 | |
pxor %xmm13, %xmm12 #388.9 | |
movdqa %xmm4, %xmm13 #388.9 | |
psrld $22, %xmm0 #388.9 | |
pslld $10, %xmm13 #388.9 | |
pand %xmm6, %xmm15 #388.9 | |
por %xmm13, %xmm0 #388.9 | |
movdqa %xmm4, %xmm13 #388.9 | |
pxor %xmm0, %xmm12 #388.9 | |
pand %xmm10, %xmm13 #388.9 | |
movdqa %xmm15, %xmm0 #388.9 | |
pxor %xmm13, %xmm0 #388.9 | |
movdqa %xmm5, %xmm13 #389.15 | |
pxor %xmm14, %xmm0 #388.9 | |
movdqa %xmm5, %xmm14 #389.15 | |
paddd %xmm0, %xmm12 #388.9 | |
movdqa %xmm5, %xmm0 #389.15 | |
psrld $17, %xmm14 #389.15 | |
pslld $15, %xmm0 #389.15 | |
por %xmm0, %xmm14 #389.15 | |
movdqa %xmm5, %xmm0 #389.15 | |
psrld $19, %xmm13 #389.15 | |
pslld $13, %xmm0 #389.15 | |
por %xmm0, %xmm13 #389.15 | |
paddd %xmm3, %xmm1 #388.9 | |
movdqa %xmm5, 4016(%rsp) #385.15 | |
pxor %xmm13, %xmm14 #389.15 | |
psrld $10, %xmm5 #389.15 | |
paddd %xmm1, %xmm2 #388.9 | |
movdqa 3808(%rsp), %xmm13 #389.15 | |
pxor %xmm5, %xmm14 #389.15 | |
movdqa 1264(%rsp), %xmm0 #389.15 | |
paddd %xmm14, %xmm13 #389.15 | |
paddd %xmm12, %xmm1 #388.9 | |
paddd %xmm13, %xmm0 #389.15 | |
movdqa %xmm2, %xmm5 #390.9 | |
movdqa %xmm2, %xmm12 #390.9 | |
movdqa %xmm2, %xmm14 #390.9 | |
movdqa %xmm2, %xmm13 #390.9 | |
psrld $6, %xmm5 #390.9 | |
pslld $26, %xmm12 #390.9 | |
psrld $11, %xmm14 #390.9 | |
pslld $21, %xmm13 #390.9 | |
por %xmm12, %xmm5 #390.9 | |
por %xmm13, %xmm14 #390.9 | |
pxor %xmm14, %xmm5 #390.9 | |
movdqa %xmm2, %xmm12 #390.9 | |
movdqa %xmm2, %xmm14 #390.9 | |
psrld $25, %xmm12 #390.9 | |
pslld $7, %xmm14 #390.9 | |
movdqa %xmm2, %xmm13 #390.9 | |
por %xmm14, %xmm12 #390.9 | |
movdqa %xmm2, %xmm14 #390.9 | |
pand %xmm11, %xmm14 #390.9 | |
pandn %xmm8, %xmm13 #390.9 | |
pxor %xmm12, %xmm5 #390.9 | |
pxor %xmm13, %xmm14 #390.9 | |
movdqa .L_2il0floatpacket.6774(%rip), %xmm12 #390.9 | |
movdqa %xmm1, %xmm13 #390.9 | |
paddd %xmm14, %xmm12 #390.9 | |
movdqa %xmm1, %xmm14 #390.9 | |
paddd %xmm5, %xmm9 #390.9 | |
psrld $2, %xmm13 #390.9 | |
pslld $30, %xmm14 #390.9 | |
paddd %xmm12, %xmm9 #390.9 | |
por %xmm14, %xmm13 #390.9 | |
movdqa %xmm1, %xmm14 #390.9 | |
movdqa %xmm1, %xmm12 #390.9 | |
psrld $13, %xmm14 #390.9 | |
pslld $19, %xmm12 #390.9 | |
movdqa %xmm1, %xmm5 #390.9 | |
por %xmm12, %xmm14 #390.9 | |
movdqa %xmm1, %xmm12 #390.9 | |
pxor %xmm14, %xmm13 #390.9 | |
movdqa %xmm1, %xmm14 #390.9 | |
psrld $22, %xmm14 #390.9 | |
pslld $10, %xmm12 #390.9 | |
pand %xmm4, %xmm5 #390.9 | |
por %xmm12, %xmm14 #390.9 | |
movdqa %xmm1, %xmm12 #390.9 | |
pxor %xmm14, %xmm13 #390.9 | |
pand %xmm6, %xmm12 #390.9 | |
movdqa %xmm5, %xmm14 #390.9 | |
pxor %xmm12, %xmm14 #390.9 | |
paddd %xmm0, %xmm9 #390.9 | |
pxor %xmm15, %xmm14 #390.9 | |
paddd %xmm9, %xmm10 #390.9 | |
paddd %xmm14, %xmm13 #390.9 | |
movdqa %xmm3, %xmm14 #391.15 | |
paddd %xmm13, %xmm9 #390.9 | |
movdqa %xmm3, %xmm15 #391.15 | |
movdqa %xmm3, %xmm13 #391.15 | |
movdqa %xmm3, %xmm12 #391.15 | |
psrld $17, %xmm14 #391.15 | |
pslld $15, %xmm15 #391.15 | |
psrld $19, %xmm13 #391.15 | |
pslld $13, %xmm12 #391.15 | |
por %xmm15, %xmm14 #391.15 | |
por %xmm12, %xmm13 #391.15 | |
movdqa %xmm3, 4032(%rsp) #387.15 | |
pxor %xmm13, %xmm14 #391.15 | |
psrld $10, %xmm3 #391.15 | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 30, w14); | |
movdqa %xmm10, %xmm15 #392.9 | |
pxor %xmm3, %xmm14 #391.15 | |
pslld $26, %xmm15 #392.9 | |
paddd %xmm14, %xmm7 #391.15 | |
movdqa %xmm10, %xmm14 #392.9 | |
movdqa 1280(%rsp), %xmm3 #391.15 | |
psrld $6, %xmm14 #392.9 | |
movdqa %xmm10, %xmm13 #392.9 | |
movdqa %xmm10, %xmm12 #392.9 | |
paddd %xmm7, %xmm3 #391.15 | |
por %xmm15, %xmm14 #392.9 | |
psrld $11, %xmm13 #392.9 | |
pslld $21, %xmm12 #392.9 | |
movdqa %xmm10, %xmm7 #392.9 | |
movdqa %xmm10, %xmm15 #392.9 | |
por %xmm12, %xmm13 #392.9 | |
psrld $25, %xmm7 #392.9 | |
pslld $7, %xmm15 #392.9 | |
pxor %xmm13, %xmm14 #392.9 | |
por %xmm15, %xmm7 #392.9 | |
movdqa %xmm10, %xmm12 #392.9 | |
pxor %xmm7, %xmm14 #392.9 | |
movdqa %xmm10, %xmm7 #392.9 | |
pand %xmm2, %xmm7 #392.9 | |
pandn %xmm11, %xmm12 #392.9 | |
paddd %xmm14, %xmm8 #392.9 | |
pxor %xmm12, %xmm7 #392.9 | |
movdqa .L_2il0floatpacket.6775(%rip), %xmm14 #392.9 | |
movdqa %xmm9, %xmm15 #392.9 | |
paddd %xmm7, %xmm14 #392.9 | |
movdqa %xmm9, %xmm13 #392.9 | |
paddd %xmm14, %xmm8 #392.9 | |
movdqa %xmm9, %xmm14 #392.9 | |
movdqa %xmm9, %xmm12 #392.9 | |
psrld $2, %xmm15 #392.9 | |
pslld $30, %xmm13 #392.9 | |
psrld $13, %xmm14 #392.9 | |
pslld $19, %xmm12 #392.9 | |
por %xmm13, %xmm15 #392.9 | |
por %xmm12, %xmm14 #392.9 | |
movdqa %xmm9, %xmm7 #392.9 | |
pxor %xmm14, %xmm15 #392.9 | |
movdqa %xmm9, %xmm13 #392.9 | |
movdqa %xmm9, %xmm14 #392.9 | |
pand %xmm1, %xmm7 #392.9 | |
psrld $22, %xmm13 #392.9 | |
pslld $10, %xmm14 #392.9 | |
movdqa %xmm9, %xmm12 #392.9 | |
por %xmm14, %xmm13 #392.9 | |
pand %xmm4, %xmm12 #392.9 | |
movdqa %xmm7, %xmm14 #392.9 | |
pxor %xmm12, %xmm14 #392.9 | |
pxor %xmm13, %xmm15 #392.9 | |
pxor %xmm5, %xmm14 #392.9 | |
paddd %xmm3, %xmm8 #392.9 | |
paddd %xmm14, %xmm15 #392.9 | |
paddd %xmm8, %xmm6 #392.9 | |
paddd %xmm15, %xmm8 #392.9 | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
movdqa %xmm0, %xmm5 #393.15 | |
movdqa %xmm0, %xmm14 #393.15 | |
movdqa %xmm0, %xmm15 #393.15 | |
movdqa %xmm0, %xmm13 #393.15 | |
psrld $17, %xmm5 #393.15 | |
pslld $15, %xmm14 #393.15 | |
psrld $19, %xmm15 #393.15 | |
pslld $13, %xmm13 #393.15 | |
por %xmm14, %xmm5 #393.15 | |
por %xmm13, %xmm15 #393.15 | |
movdqa %xmm0, 4048(%rsp) #389.15 | |
pxor %xmm15, %xmm5 #393.15 | |
psrld $10, %xmm0 #393.15 | |
movdqa 3904(%rsp), %xmm14 #393.15 | |
pxor %xmm0, %xmm5 #393.15 | |
movdqa 3600(%rsp), %xmm15 #393.15 | |
paddd %xmm5, %xmm14 #393.15 | |
movdqa %xmm15, %xmm0 #393.15 | |
movdqa %xmm15, %xmm12 #393.15 | |
movdqa %xmm15, %xmm5 #393.15 | |
movdqa %xmm15, %xmm13 #393.15 | |
psrld $7, %xmm0 #393.15 | |
pslld $25, %xmm12 #393.15 | |
psrld $18, %xmm5 #393.15 | |
pslld $14, %xmm13 #393.15 | |
por %xmm12, %xmm0 #393.15 | |
por %xmm13, %xmm5 #393.15 | |
movdqa %xmm15, %xmm12 #393.15 | |
pxor %xmm5, %xmm0 #393.15 | |
psrld $3, %xmm12 #393.15 | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 31, w15); | |
movdqa %xmm6, %xmm13 #394.9 | |
movdqa 944(%rsp), %xmm5 #393.15 | |
pxor %xmm12, %xmm0 #393.15 | |
paddd %xmm0, %xmm5 #393.15 | |
movdqa %xmm6, %xmm0 #394.9 | |
paddd %xmm5, %xmm14 #393.15 | |
movdqa %xmm6, %xmm5 #394.9 | |
movdqa %xmm6, %xmm12 #394.9 | |
psrld $6, %xmm0 #394.9 | |
pslld $26, %xmm13 #394.9 | |
psrld $11, %xmm5 #394.9 | |
pslld $21, %xmm12 #394.9 | |
por %xmm13, %xmm0 #394.9 | |
por %xmm12, %xmm5 #394.9 | |
movdqa %xmm6, %xmm13 #394.9 | |
pxor %xmm5, %xmm0 #394.9 | |
movdqa %xmm6, %xmm5 #394.9 | |
psrld $25, %xmm13 #394.9 | |
pslld $7, %xmm5 #394.9 | |
por %xmm5, %xmm13 #394.9 | |
movdqa %xmm6, %xmm5 #394.9 | |
pxor %xmm13, %xmm0 #394.9 | |
movdqa %xmm6, %xmm13 #394.9 | |
pand %xmm10, %xmm5 #394.9 | |
pandn %xmm2, %xmm13 #394.9 | |
movdqa .L_2il0floatpacket.6776(%rip), %xmm12 #394.9 | |
pxor %xmm13, %xmm5 #394.9 | |
paddd %xmm5, %xmm12 #394.9 | |
movdqa %xmm8, %xmm13 #394.9 | |
movdqa %xmm8, %xmm5 #394.9 | |
paddd %xmm0, %xmm11 #394.9 | |
psrld $2, %xmm13 #394.9 | |
pslld $30, %xmm5 #394.9 | |
paddd %xmm12, %xmm11 #394.9 | |
por %xmm5, %xmm13 #394.9 | |
movdqa %xmm8, %xmm5 #394.9 | |
movdqa %xmm8, %xmm12 #394.9 | |
psrld $13, %xmm5 #394.9 | |
pslld $19, %xmm12 #394.9 | |
por %xmm12, %xmm5 #394.9 | |
movdqa %xmm8, %xmm12 #394.9 | |
pxor %xmm5, %xmm13 #394.9 | |
movdqa %xmm8, %xmm5 #394.9 | |
movdqa %xmm8, %xmm0 #394.9 | |
psrld $22, %xmm12 #394.9 | |
pslld $10, %xmm5 #394.9 | |
pand %xmm9, %xmm0 #394.9 | |
por %xmm5, %xmm12 #394.9 | |
movdqa %xmm8, %xmm5 #394.9 | |
pxor %xmm12, %xmm13 #394.9 | |
pand %xmm1, %xmm5 #394.9 | |
movdqa %xmm0, %xmm12 #394.9 | |
paddd %xmm14, %xmm11 #394.9 | |
pxor %xmm5, %xmm12 #394.9 | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
movdqa %xmm3, %xmm5 #396.14 | |
pxor %xmm7, %xmm12 #394.9 | |
movdqa %xmm3, %xmm7 #396.14 | |
psrld $17, %xmm7 #396.14 | |
pslld $15, %xmm5 #396.14 | |
paddd %xmm12, %xmm13 #394.9 | |
por %xmm5, %xmm7 #396.14 | |
movdqa %xmm3, %xmm12 #396.14 | |
movdqa %xmm3, %xmm5 #396.14 | |
psrld $19, %xmm12 #396.14 | |
pslld $13, %xmm5 #396.14 | |
por %xmm5, %xmm12 #396.14 | |
paddd %xmm11, %xmm4 #394.9 | |
movdqa 3664(%rsp), %xmm5 #396.14 | |
paddd %xmm13, %xmm11 #394.9 | |
pxor %xmm12, %xmm7 #396.14 | |
movdqa %xmm5, %xmm13 #396.14 | |
movdqa %xmm5, %xmm12 #396.14 | |
psrld $7, %xmm13 #396.14 | |
movdqa %xmm3, 4096(%rsp) #391.15 | |
psrld $10, %xmm3 #396.14 | |
pslld $25, %xmm12 #396.14 | |
pxor %xmm3, %xmm7 #396.14 | |
por %xmm12, %xmm13 #396.14 | |
movdqa %xmm5, %xmm3 #396.14 | |
movdqa %xmm5, %xmm12 #396.14 | |
psrld $18, %xmm3 #396.14 | |
pslld $14, %xmm12 #396.14 | |
psrld $3, %xmm5 #396.14 | |
por %xmm12, %xmm3 #396.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 32, w0); | |
movdqa %xmm4, %xmm12 #397.9 | |
pxor %xmm3, %xmm13 #396.14 | |
movdqa %xmm4, %xmm3 #397.9 | |
pxor %xmm5, %xmm13 #396.14 | |
movdqa %xmm4, %xmm5 #397.9 | |
paddd 3984(%rsp), %xmm7 #396.14 | |
paddd %xmm13, %xmm15 #396.14 | |
psrld $6, %xmm3 #397.9 | |
pslld $26, %xmm5 #397.9 | |
movdqa %xmm4, %xmm13 #397.9 | |
paddd %xmm15, %xmm7 #396.14 | |
por %xmm5, %xmm3 #397.9 | |
psrld $11, %xmm12 #397.9 | |
pslld $21, %xmm13 #397.9 | |
movdqa %xmm4, %xmm5 #397.9 | |
movdqa %xmm4, %xmm15 #397.9 | |
por %xmm13, %xmm12 #397.9 | |
psrld $25, %xmm5 #397.9 | |
pslld $7, %xmm15 #397.9 | |
pxor %xmm12, %xmm3 #397.9 | |
por %xmm15, %xmm5 #397.9 | |
pxor %xmm5, %xmm3 #397.9 | |
movdqa %xmm11, %xmm5 #397.9 | |
paddd %xmm3, %xmm2 #397.9 | |
movdqa %xmm4, %xmm3 #397.9 | |
movdqa %xmm4, 4160(%rsp) #394.9 | |
pand %xmm6, %xmm3 #397.9 | |
pandn %xmm10, %xmm4 #397.9 | |
psrld $22, %xmm5 #397.9 | |
pxor %xmm4, %xmm3 #397.9 | |
movdqa .L_2il0floatpacket.6777(%rip), %xmm4 #397.9 | |
paddd %xmm3, %xmm4 #397.9 | |
movdqa %xmm11, %xmm3 #397.9 | |
paddd %xmm4, %xmm2 #397.9 | |
movdqa %xmm11, %xmm4 #397.9 | |
paddd %xmm7, %xmm2 #397.9 | |
psrld $13, %xmm3 #397.9 | |
movdqa %xmm10, 4064(%rsp) #390.9 | |
movdqa %xmm11, %xmm10 #397.9 | |
movdqa %xmm6, 4112(%rsp) #392.9 | |
movdqa %xmm11, %xmm6 #397.9 | |
paddd %xmm2, %xmm1 #397.9 | |
psrld $2, %xmm6 #397.9 | |
movdqa %xmm7, 4192(%rsp) #396.14 | |
pslld $30, %xmm10 #397.9 | |
movdqa %xmm1, 4208(%rsp) #397.9 | |
movdqa %xmm11, %xmm1 #397.9 | |
pslld $19, %xmm4 #397.9 | |
movdqa %xmm11, %xmm7 #397.9 | |
movdqa %xmm11, 4176(%rsp) #394.9 | |
pand %xmm8, %xmm1 #397.9 | |
por %xmm10, %xmm6 #397.9 | |
por %xmm4, %xmm3 #397.9 | |
pslld $10, %xmm7 #397.9 | |
pand %xmm9, %xmm11 #397.9 | |
movdqa %xmm1, 4224(%rsp) #397.9 | |
pxor %xmm3, %xmm6 #397.9 | |
por %xmm7, %xmm5 #397.9 | |
pxor %xmm11, %xmm1 #397.9 | |
pxor %xmm5, %xmm6 #397.9 | |
pxor %xmm0, %xmm1 #397.9 | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
movdqa %xmm14, %xmm0 #398.14 | |
paddd %xmm1, %xmm6 #397.9 | |
movdqa %xmm14, 4144(%rsp) #393.15 | |
psrld $17, %xmm0 #398.14 | |
pslld $15, %xmm14 #398.14 | |
paddd %xmm6, %xmm2 #397.9 | |
por %xmm14, %xmm0 #398.14 | |
movdqa %xmm9, 4080(%rsp) #390.9 | |
movdqa %xmm8, 4128(%rsp) #392.9 | |
movdqa %xmm2, 4240(%rsp) #397.9 | |
movdqa %xmm0, 4256(%rsp) #398.14 | |
# LOE | |
..B2.11: # Preds ..B2.12 | |
movdqa 3712(%rsp), %xmm14 #398.14 | |
movdqa %xmm14, %xmm1 #398.14 | |
movdqa %xmm14, %xmm10 #398.14 | |
movdqa %xmm14, %xmm8 #398.14 | |
movdqa %xmm14, %xmm6 #398.14 | |
psrld $7, %xmm1 #398.14 | |
pslld $25, %xmm10 #398.14 | |
psrld $18, %xmm8 #398.14 | |
pslld $14, %xmm6 #398.14 | |
movdqa 4144(%rsp), %xmm7 #398.14 | |
por %xmm10, %xmm1 #398.14 | |
por %xmm6, %xmm8 #398.14 | |
movdqa %xmm14, %xmm5 #398.14 | |
movdqa %xmm7, %xmm2 #398.14 | |
movdqa %xmm7, %xmm0 #398.14 | |
pxor %xmm8, %xmm1 #398.14 | |
psrld $3, %xmm5 #398.14 | |
movdqa 3664(%rsp), %xmm9 #398.14 | |
psrld $19, %xmm2 #398.14 | |
pslld $13, %xmm0 #398.14 | |
pxor %xmm5, %xmm1 #398.14 | |
movdqa 4256(%rsp), %xmm3 #398.14 | |
por %xmm0, %xmm2 #398.14 | |
paddd %xmm1, %xmm9 #398.14 | |
pxor %xmm2, %xmm3 #398.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 33, w1); | |
movdqa 4208(%rsp), %xmm1 #399.9 | |
psrld $10, %xmm7 #398.14 | |
movdqa %xmm1, %xmm11 #399.9 | |
movdqa %xmm1, %xmm13 #399.9 | |
movdqa %xmm1, %xmm12 #399.9 | |
movdqa %xmm1, %xmm4 #399.9 | |
movdqa 4000(%rsp), %xmm0 #398.14 | |
pxor %xmm7, %xmm3 #398.14 | |
psrld $6, %xmm11 #399.9 | |
pslld $26, %xmm13 #399.9 | |
psrld $11, %xmm12 #399.9 | |
pslld $21, %xmm4 #399.9 | |
movdqa %xmm1, %xmm15 #399.9 | |
movdqa %xmm1, %xmm7 #399.9 | |
paddd %xmm3, %xmm0 #398.14 | |
por %xmm13, %xmm11 #399.9 | |
por %xmm4, %xmm12 #399.9 | |
psrld $25, %xmm15 #399.9 | |
pslld $7, %xmm7 #399.9 | |
movdqa %xmm0, %xmm3 #398.14 | |
pxor %xmm12, %xmm11 #399.9 | |
por %xmm7, %xmm15 #399.9 | |
paddd %xmm9, %xmm3 #398.14 | |
pxor %xmm15, %xmm11 #399.9 | |
movdqa 4064(%rsp), %xmm9 #399.9 | |
movdqa %xmm1, %xmm2 #399.9 | |
movdqa 4160(%rsp), %xmm6 #399.9 | |
paddd %xmm11, %xmm9 #399.9 | |
movdqa 4112(%rsp), %xmm11 #399.9 | |
movdqa %xmm1, %xmm10 #399.9 | |
pand %xmm6, %xmm2 #399.9 | |
pandn %xmm11, %xmm10 #399.9 | |
movdqa .L_2il0floatpacket.6778(%rip), %xmm8 #399.9 | |
pxor %xmm10, %xmm2 #399.9 | |
paddd %xmm2, %xmm8 #399.9 | |
movdqa 4240(%rsp), %xmm2 #399.9 | |
paddd %xmm8, %xmm9 #399.9 | |
movdqa %xmm2, %xmm5 #399.9 | |
movdqa %xmm2, %xmm13 #399.9 | |
psrld $2, %xmm5 #399.9 | |
pslld $30, %xmm13 #399.9 | |
movdqa %xmm2, %xmm12 #399.9 | |
movdqa %xmm2, %xmm4 #399.9 | |
por %xmm13, %xmm5 #399.9 | |
psrld $13, %xmm12 #399.9 | |
pslld $19, %xmm4 #399.9 | |
movdqa %xmm2, %xmm10 #399.9 | |
movdqa %xmm2, %xmm13 #399.9 | |
por %xmm4, %xmm12 #399.9 | |
psrld $22, %xmm10 #399.9 | |
pslld $10, %xmm13 #399.9 | |
movdqa 4176(%rsp), %xmm7 #399.9 | |
movdqa %xmm2, %xmm0 #399.9 | |
pxor %xmm12, %xmm5 #399.9 | |
por %xmm13, %xmm10 #399.9 | |
pand %xmm7, %xmm0 #399.9 | |
pxor %xmm10, %xmm5 #399.9 | |
movdqa 4128(%rsp), %xmm10 #399.9 | |
movdqa %xmm2, %xmm15 #399.9 | |
pand %xmm10, %xmm15 #399.9 | |
movdqa %xmm0, %xmm13 #399.9 | |
movdqa 4224(%rsp), %xmm12 #399.9 | |
pxor %xmm15, %xmm13 #399.9 | |
pxor %xmm13, %xmm12 #399.9 | |
paddd %xmm3, %xmm9 #399.9 | |
movdqa 4080(%rsp), %xmm8 #399.9 | |
paddd %xmm12, %xmm5 #399.9 | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
movdqa 4192(%rsp), %xmm13 #400.14 | |
paddd %xmm9, %xmm8 #399.9 | |
paddd %xmm5, %xmm9 #399.9 | |
movdqa %xmm13, %xmm5 #400.14 | |
movdqa %xmm13, %xmm4 #400.14 | |
movdqa %xmm13, %xmm12 #400.14 | |
movdqa %xmm13, %xmm15 #400.14 | |
psrld $17, %xmm5 #400.14 | |
pslld $15, %xmm4 #400.14 | |
psrld $19, %xmm12 #400.14 | |
pslld $13, %xmm15 #400.14 | |
por %xmm4, %xmm5 #400.14 | |
por %xmm15, %xmm12 #400.14 | |
psrld $10, %xmm13 #400.14 | |
pxor %xmm12, %xmm5 #400.14 | |
movdqa 4016(%rsp), %xmm15 #400.14 | |
pxor %xmm13, %xmm5 #400.14 | |
paddd %xmm5, %xmm15 #400.14 | |
movdqa 3728(%rsp), %xmm5 #400.14 | |
movdqa %xmm5, %xmm13 #400.14 | |
movdqa %xmm5, %xmm4 #400.14 | |
psrld $7, %xmm13 #400.14 | |
pslld $25, %xmm4 #400.14 | |
por %xmm4, %xmm13 #400.14 | |
movdqa %xmm5, %xmm4 #400.14 | |
movdqa %xmm5, %xmm12 #400.14 | |
psrld $18, %xmm4 #400.14 | |
pslld $14, %xmm12 #400.14 | |
por %xmm12, %xmm4 #400.14 | |
movdqa %xmm5, %xmm12 #400.14 | |
pxor %xmm4, %xmm13 #400.14 | |
psrld $3, %xmm12 #400.14 | |
pxor %xmm12, %xmm13 #400.14 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 34, w2); | |
movdqa %xmm8, %xmm4 #401.9 | |
paddd %xmm13, %xmm14 #400.14 | |
movdqa %xmm8, %xmm13 #401.9 | |
paddd %xmm14, %xmm15 #400.14 | |
movdqa %xmm8, %xmm14 #401.9 | |
movdqa %xmm8, %xmm12 #401.9 | |
psrld $6, %xmm14 #401.9 | |
pslld $26, %xmm4 #401.9 | |
psrld $11, %xmm13 #401.9 | |
pslld $21, %xmm12 #401.9 | |
por %xmm4, %xmm14 #401.9 | |
por %xmm12, %xmm13 #401.9 | |
movdqa %xmm8, %xmm4 #401.9 | |
pxor %xmm13, %xmm14 #401.9 | |
movdqa %xmm8, %xmm13 #401.9 | |
psrld $25, %xmm4 #401.9 | |
pslld $7, %xmm13 #401.9 | |
por %xmm13, %xmm4 #401.9 | |
movdqa %xmm1, %xmm13 #401.9 | |
movdqa %xmm8, %xmm12 #401.9 | |
pand %xmm8, %xmm13 #401.9 | |
pandn %xmm6, %xmm12 #401.9 | |
pxor %xmm4, %xmm14 #401.9 | |
movdqa .L_2il0floatpacket.6779(%rip), %xmm4 #401.9 | |
pxor %xmm12, %xmm13 #401.9 | |
paddd %xmm14, %xmm11 #401.9 | |
paddd %xmm13, %xmm4 #401.9 | |
paddd %xmm4, %xmm11 #401.9 | |
movdqa %xmm9, %xmm4 #401.9 | |
movdqa %xmm9, %xmm12 #401.9 | |
psrld $2, %xmm4 #401.9 | |
pslld $30, %xmm12 #401.9 | |
movdqa %xmm9, %xmm13 #401.9 | |
por %xmm12, %xmm4 #401.9 | |
movdqa %xmm9, %xmm12 #401.9 | |
psrld $13, %xmm13 #401.9 | |
pslld $19, %xmm12 #401.9 | |
por %xmm12, %xmm13 #401.9 | |
movdqa %xmm9, %xmm12 #401.9 | |
pxor %xmm13, %xmm4 #401.9 | |
movdqa %xmm9, %xmm13 #401.9 | |
movdqa %xmm2, %xmm14 #401.9 | |
psrld $22, %xmm12 #401.9 | |
pslld $10, %xmm13 #401.9 | |
pand %xmm9, %xmm14 #401.9 | |
por %xmm13, %xmm12 #401.9 | |
movdqa %xmm7, %xmm13 #401.9 | |
pxor %xmm12, %xmm4 #401.9 | |
pand %xmm9, %xmm13 #401.9 | |
movdqa %xmm14, %xmm12 #401.9 | |
paddd %xmm15, %xmm11 #401.9 | |
pxor %xmm13, %xmm12 #401.9 | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
movdqa %xmm3, %xmm13 #402.14 | |
pxor %xmm0, %xmm12 #401.9 | |
movdqa %xmm3, %xmm0 #402.14 | |
psrld $17, %xmm0 #402.14 | |
pslld $15, %xmm13 #402.14 | |
paddd %xmm12, %xmm4 #401.9 | |
por %xmm13, %xmm0 #402.14 | |
movdqa %xmm3, %xmm12 #402.14 | |
movdqa %xmm3, %xmm13 #402.14 | |
psrld $19, %xmm12 #402.14 | |
pslld $13, %xmm13 #402.14 | |
por %xmm13, %xmm12 #402.14 | |
paddd %xmm11, %xmm10 #401.9 | |
movdqa %xmm3, 4272(%rsp) #398.14 | |
pxor %xmm12, %xmm0 #402.14 | |
movdqa 3744(%rsp), %xmm13 #402.14 | |
psrld $10, %xmm3 #402.14 | |
paddd %xmm4, %xmm11 #401.9 | |
pxor %xmm3, %xmm0 #402.14 | |
movdqa %xmm13, %xmm4 #402.14 | |
movdqa %xmm13, %xmm3 #402.14 | |
movdqa 4032(%rsp), %xmm12 #402.14 | |
psrld $7, %xmm4 #402.14 | |
pslld $25, %xmm3 #402.14 | |
paddd %xmm0, %xmm12 #402.14 | |
por %xmm3, %xmm4 #402.14 | |
movdqa %xmm13, %xmm0 #402.14 | |
movdqa %xmm13, %xmm3 #402.14 | |
psrld $18, %xmm0 #402.14 | |
pslld $14, %xmm3 #402.14 | |
por %xmm3, %xmm0 #402.14 | |
movdqa %xmm13, %xmm3 #402.14 | |
pxor %xmm0, %xmm4 #402.14 | |
psrld $3, %xmm3 #402.14 | |
pxor %xmm3, %xmm4 #402.14 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 35, w3); | |
movdqa %xmm10, %xmm0 #403.9 | |
paddd %xmm4, %xmm5 #402.14 | |
movdqa %xmm10, %xmm3 #403.9 | |
paddd %xmm5, %xmm12 #402.14 | |
movdqa %xmm10, %xmm5 #403.9 | |
movdqa %xmm10, %xmm4 #403.9 | |
psrld $6, %xmm5 #403.9 | |
pslld $26, %xmm0 #403.9 | |
psrld $11, %xmm3 #403.9 | |
pslld $21, %xmm4 #403.9 | |
por %xmm0, %xmm5 #403.9 | |
por %xmm4, %xmm3 #403.9 | |
movdqa %xmm10, %xmm0 #403.9 | |
pxor %xmm3, %xmm5 #403.9 | |
movdqa %xmm10, %xmm3 #403.9 | |
psrld $25, %xmm0 #403.9 | |
pslld $7, %xmm3 #403.9 | |
por %xmm3, %xmm0 #403.9 | |
movdqa %xmm10, %xmm3 #403.9 | |
pxor %xmm0, %xmm5 #403.9 | |
movdqa %xmm10, %xmm0 #403.9 | |
pand %xmm8, %xmm0 #403.9 | |
pandn %xmm1, %xmm3 #403.9 | |
movdqa .L_2il0floatpacket.6780(%rip), %xmm4 #403.9 | |
pxor %xmm3, %xmm0 #403.9 | |
paddd %xmm0, %xmm4 #403.9 | |
movdqa %xmm11, %xmm3 #403.9 | |
movdqa %xmm11, %xmm0 #403.9 | |
paddd %xmm5, %xmm6 #403.9 | |
psrld $2, %xmm3 #403.9 | |
pslld $30, %xmm0 #403.9 | |
paddd %xmm4, %xmm6 #403.9 | |
por %xmm0, %xmm3 #403.9 | |
movdqa %xmm11, %xmm0 #403.9 | |
movdqa %xmm11, %xmm4 #403.9 | |
psrld $13, %xmm0 #403.9 | |
pslld $19, %xmm4 #403.9 | |
por %xmm4, %xmm0 #403.9 | |
movdqa %xmm11, %xmm4 #403.9 | |
pxor %xmm0, %xmm3 #403.9 | |
movdqa %xmm11, %xmm0 #403.9 | |
movdqa %xmm11, %xmm5 #403.9 | |
psrld $22, %xmm0 #403.9 | |
pslld $10, %xmm4 #403.9 | |
pand %xmm9, %xmm5 #403.9 | |
por %xmm4, %xmm0 #403.9 | |
movdqa %xmm2, %xmm4 #403.9 | |
pxor %xmm0, %xmm3 #403.9 | |
pand %xmm11, %xmm4 #403.9 | |
movdqa %xmm5, %xmm0 #403.9 | |
paddd %xmm12, %xmm6 #403.9 | |
pxor %xmm4, %xmm0 #403.9 | |
paddd %xmm6, %xmm7 #403.9 | |
pxor %xmm14, %xmm0 #403.9 | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
movdqa %xmm15, %xmm14 #404.14 | |
paddd %xmm0, %xmm3 #403.9 | |
movdqa %xmm15, %xmm0 #404.14 | |
paddd %xmm3, %xmm6 #403.9 | |
movdqa %xmm15, %xmm3 #404.14 | |
movdqa %xmm15, %xmm4 #404.14 | |
psrld $17, %xmm0 #404.14 | |
pslld $15, %xmm14 #404.14 | |
psrld $19, %xmm3 #404.14 | |
pslld $13, %xmm4 #404.14 | |
por %xmm14, %xmm0 #404.14 | |
por %xmm4, %xmm3 #404.14 | |
movdqa %xmm15, 4288(%rsp) #400.14 | |
pxor %xmm3, %xmm0 #404.14 | |
movdqa 3760(%rsp), %xmm4 #404.14 | |
psrld $10, %xmm15 #404.14 | |
pxor %xmm15, %xmm0 #404.14 | |
movdqa %xmm4, %xmm15 #404.14 | |
movdqa %xmm4, %xmm14 #404.14 | |
psrld $7, %xmm15 #404.14 | |
movdqa 4048(%rsp), %xmm3 #404.14 | |
pslld $25, %xmm14 #404.14 | |
paddd %xmm0, %xmm3 #404.14 | |
por %xmm14, %xmm15 #404.14 | |
movdqa %xmm4, %xmm0 #404.14 | |
movdqa %xmm4, %xmm14 #404.14 | |
psrld $18, %xmm0 #404.14 | |
pslld $14, %xmm14 #404.14 | |
por %xmm14, %xmm0 #404.14 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 36, w4); | |
movdqa %xmm7, %xmm14 #405.9 | |
pxor %xmm0, %xmm15 #404.14 | |
movdqa %xmm4, %xmm0 #404.14 | |
psrld $3, %xmm0 #404.14 | |
psrld $11, %xmm14 #405.9 | |
pxor %xmm0, %xmm15 #404.14 | |
movdqa %xmm7, %xmm0 #405.9 | |
paddd %xmm15, %xmm13 #404.14 | |
movdqa %xmm7, %xmm15 #405.9 | |
paddd %xmm13, %xmm3 #404.14 | |
movdqa %xmm7, %xmm13 #405.9 | |
psrld $6, %xmm0 #405.9 | |
pslld $26, %xmm15 #405.9 | |
pslld $21, %xmm13 #405.9 | |
por %xmm15, %xmm0 #405.9 | |
por %xmm13, %xmm14 #405.9 | |
movdqa %xmm7, %xmm15 #405.9 | |
movdqa %xmm7, %xmm13 #405.9 | |
psrld $25, %xmm15 #405.9 | |
pslld $7, %xmm13 #405.9 | |
pxor %xmm14, %xmm0 #405.9 | |
por %xmm13, %xmm15 #405.9 | |
movdqa %xmm7, %xmm13 #405.9 | |
pxor %xmm15, %xmm0 #405.9 | |
movdqa %xmm7, %xmm15 #405.9 | |
pand %xmm10, %xmm13 #405.9 | |
pandn %xmm8, %xmm15 #405.9 | |
movdqa .L_2il0floatpacket.6781(%rip), %xmm14 #405.9 | |
pxor %xmm15, %xmm13 #405.9 | |
paddd %xmm13, %xmm14 #405.9 | |
movdqa %xmm6, %xmm13 #405.9 | |
movdqa %xmm6, %xmm15 #405.9 | |
paddd %xmm0, %xmm1 #405.9 | |
psrld $2, %xmm13 #405.9 | |
pslld $30, %xmm15 #405.9 | |
paddd %xmm14, %xmm1 #405.9 | |
por %xmm15, %xmm13 #405.9 | |
movdqa %xmm6, %xmm15 #405.9 | |
movdqa %xmm6, %xmm14 #405.9 | |
psrld $13, %xmm15 #405.9 | |
pslld $19, %xmm14 #405.9 | |
por %xmm14, %xmm15 #405.9 | |
movdqa %xmm6, %xmm14 #405.9 | |
pxor %xmm15, %xmm13 #405.9 | |
movdqa %xmm6, %xmm15 #405.9 | |
movdqa %xmm6, %xmm0 #405.9 | |
psrld $22, %xmm14 #405.9 | |
pslld $10, %xmm15 #405.9 | |
pand %xmm11, %xmm0 #405.9 | |
por %xmm15, %xmm14 #405.9 | |
movdqa %xmm6, %xmm15 #405.9 | |
pxor %xmm14, %xmm13 #405.9 | |
pand %xmm9, %xmm15 #405.9 | |
movdqa %xmm0, %xmm14 #405.9 | |
paddd %xmm3, %xmm1 #405.9 | |
pxor %xmm15, %xmm14 #405.9 | |
paddd %xmm1, %xmm2 #405.9 | |
pxor %xmm5, %xmm14 #405.9 | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
movdqa %xmm12, %xmm5 #406.14 | |
paddd %xmm14, %xmm13 #405.9 | |
movdqa %xmm12, %xmm15 #406.14 | |
paddd %xmm13, %xmm1 #405.9 | |
movdqa %xmm12, %xmm13 #406.14 | |
movdqa %xmm12, %xmm14 #406.14 | |
psrld $17, %xmm13 #406.14 | |
pslld $15, %xmm5 #406.14 | |
psrld $19, %xmm15 #406.14 | |
pslld $13, %xmm14 #406.14 | |
por %xmm5, %xmm13 #406.14 | |
por %xmm14, %xmm15 #406.14 | |
movdqa %xmm12, 4304(%rsp) #402.14 | |
pxor %xmm15, %xmm13 #406.14 | |
psrld $10, %xmm12 #406.14 | |
pxor %xmm12, %xmm13 #406.14 | |
movdqa 3808(%rsp), %xmm12 #406.14 | |
movdqa %xmm12, %xmm15 #406.14 | |
movdqa %xmm12, %xmm14 #406.14 | |
movdqa 4096(%rsp), %xmm5 #406.14 | |
psrld $7, %xmm15 #406.14 | |
pslld $25, %xmm14 #406.14 | |
paddd %xmm13, %xmm5 #406.14 | |
por %xmm14, %xmm15 #406.14 | |
movdqa %xmm12, %xmm13 #406.14 | |
movdqa %xmm12, %xmm14 #406.14 | |
psrld $18, %xmm13 #406.14 | |
pslld $14, %xmm14 #406.14 | |
por %xmm14, %xmm13 #406.14 | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 37, w5); | |
movdqa %xmm2, %xmm14 #407.9 | |
pxor %xmm13, %xmm15 #406.14 | |
movdqa %xmm12, %xmm13 #406.14 | |
psrld $3, %xmm13 #406.14 | |
pslld $26, %xmm14 #407.9 | |
pxor %xmm13, %xmm15 #406.14 | |
movdqa %xmm2, %xmm13 #407.9 | |
paddd %xmm15, %xmm4 #406.14 | |
movdqa %xmm2, %xmm15 #407.9 | |
paddd %xmm4, %xmm5 #406.14 | |
movdqa %xmm2, %xmm4 #407.9 | |
psrld $6, %xmm4 #407.9 | |
psrld $11, %xmm13 #407.9 | |
pslld $21, %xmm15 #407.9 | |
por %xmm14, %xmm4 #407.9 | |
por %xmm15, %xmm13 #407.9 | |
movdqa %xmm2, %xmm14 #407.9 | |
pxor %xmm13, %xmm4 #407.9 | |
movdqa %xmm2, %xmm13 #407.9 | |
psrld $25, %xmm14 #407.9 | |
pslld $7, %xmm13 #407.9 | |
por %xmm13, %xmm14 #407.9 | |
movdqa %xmm2, %xmm13 #407.9 | |
movdqa %xmm2, %xmm15 #407.9 | |
pand %xmm7, %xmm13 #407.9 | |
pandn %xmm10, %xmm15 #407.9 | |
pxor %xmm14, %xmm4 #407.9 | |
movdqa .L_2il0floatpacket.6782(%rip), %xmm14 #407.9 | |
pxor %xmm15, %xmm13 #407.9 | |
paddd %xmm13, %xmm14 #407.9 | |
movdqa %xmm1, %xmm15 #407.9 | |
movdqa %xmm1, %xmm13 #407.9 | |
paddd %xmm4, %xmm8 #407.9 | |
psrld $2, %xmm15 #407.9 | |
pslld $30, %xmm13 #407.9 | |
paddd %xmm14, %xmm8 #407.9 | |
por %xmm13, %xmm15 #407.9 | |
movdqa %xmm1, %xmm13 #407.9 | |
movdqa %xmm1, %xmm14 #407.9 | |
psrld $13, %xmm13 #407.9 | |
pslld $19, %xmm14 #407.9 | |
por %xmm14, %xmm13 #407.9 | |
movdqa %xmm1, %xmm14 #407.9 | |
pxor %xmm13, %xmm15 #407.9 | |
movdqa %xmm1, %xmm13 #407.9 | |
movdqa %xmm1, %xmm4 #407.9 | |
psrld $22, %xmm13 #407.9 | |
pslld $10, %xmm14 #407.9 | |
pand %xmm6, %xmm4 #407.9 | |
por %xmm14, %xmm13 #407.9 | |
movdqa %xmm1, %xmm14 #407.9 | |
pxor %xmm13, %xmm15 #407.9 | |
pand %xmm11, %xmm14 #407.9 | |
movdqa %xmm4, %xmm13 #407.9 | |
paddd %xmm5, %xmm8 #407.9 | |
pxor %xmm14, %xmm13 #407.9 | |
paddd %xmm8, %xmm9 #407.9 | |
pxor %xmm0, %xmm13 #407.9 | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
movdqa %xmm3, %xmm0 #408.14 | |
paddd %xmm13, %xmm15 #407.9 | |
movdqa %xmm3, %xmm13 #408.14 | |
paddd %xmm15, %xmm8 #407.9 | |
movdqa %xmm3, %xmm15 #408.14 | |
movdqa %xmm3, %xmm14 #408.14 | |
psrld $17, %xmm13 #408.14 | |
pslld $15, %xmm0 #408.14 | |
psrld $19, %xmm15 #408.14 | |
pslld $13, %xmm14 #408.14 | |
por %xmm0, %xmm13 #408.14 | |
por %xmm14, %xmm15 #408.14 | |
movdqa %xmm3, 4320(%rsp) #404.14 | |
pxor %xmm15, %xmm13 #408.14 | |
psrld $10, %xmm3 #408.14 | |
movdqa 4144(%rsp), %xmm0 #408.14 | |
pxor %xmm3, %xmm13 #408.14 | |
paddd %xmm13, %xmm0 #408.14 | |
movdqa 3856(%rsp), %xmm13 #408.14 | |
movdqa %xmm13, %xmm15 #408.14 | |
movdqa %xmm13, %xmm14 #408.14 | |
psrld $7, %xmm15 #408.14 | |
pslld $25, %xmm14 #408.14 | |
por %xmm14, %xmm15 #408.14 | |
movdqa %xmm13, %xmm3 #408.14 | |
movdqa %xmm13, %xmm14 #408.14 | |
psrld $18, %xmm3 #408.14 | |
pslld $14, %xmm14 #408.14 | |
por %xmm14, %xmm3 #408.14 | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 38, w6); | |
movdqa %xmm9, %xmm14 #409.9 | |
pxor %xmm3, %xmm15 #408.14 | |
movdqa %xmm13, %xmm3 #408.14 | |
psrld $3, %xmm3 #408.14 | |
psrld $11, %xmm14 #409.9 | |
pxor %xmm3, %xmm15 #408.14 | |
movdqa %xmm9, %xmm3 #409.9 | |
paddd %xmm15, %xmm12 #408.14 | |
movdqa %xmm9, %xmm15 #409.9 | |
paddd %xmm12, %xmm0 #408.14 | |
movdqa %xmm9, %xmm12 #409.9 | |
psrld $6, %xmm3 #409.9 | |
pslld $26, %xmm15 #409.9 | |
pslld $21, %xmm12 #409.9 | |
por %xmm15, %xmm3 #409.9 | |
por %xmm12, %xmm14 #409.9 | |
movdqa %xmm9, %xmm15 #409.9 | |
movdqa %xmm9, %xmm12 #409.9 | |
psrld $25, %xmm15 #409.9 | |
pslld $7, %xmm12 #409.9 | |
pxor %xmm14, %xmm3 #409.9 | |
por %xmm12, %xmm15 #409.9 | |
movdqa %xmm9, %xmm12 #409.9 | |
pxor %xmm15, %xmm3 #409.9 | |
movdqa %xmm9, %xmm15 #409.9 | |
pand %xmm2, %xmm12 #409.9 | |
pandn %xmm7, %xmm15 #409.9 | |
movdqa .L_2il0floatpacket.6783(%rip), %xmm14 #409.9 | |
pxor %xmm15, %xmm12 #409.9 | |
paddd %xmm12, %xmm14 #409.9 | |
movdqa %xmm8, %xmm15 #409.9 | |
movdqa %xmm8, %xmm12 #409.9 | |
paddd %xmm3, %xmm10 #409.9 | |
psrld $2, %xmm15 #409.9 | |
pslld $30, %xmm12 #409.9 | |
paddd %xmm14, %xmm10 #409.9 | |
por %xmm12, %xmm15 #409.9 | |
movdqa %xmm8, %xmm12 #409.9 | |
movdqa %xmm8, %xmm14 #409.9 | |
psrld $13, %xmm12 #409.9 | |
pslld $19, %xmm14 #409.9 | |
por %xmm14, %xmm12 #409.9 | |
movdqa %xmm8, %xmm14 #409.9 | |
pxor %xmm12, %xmm15 #409.9 | |
movdqa %xmm8, %xmm12 #409.9 | |
movdqa %xmm8, %xmm3 #409.9 | |
psrld $22, %xmm14 #409.9 | |
pslld $10, %xmm12 #409.9 | |
pand %xmm1, %xmm3 #409.9 | |
por %xmm12, %xmm14 #409.9 | |
movdqa %xmm8, %xmm12 #409.9 | |
pxor %xmm14, %xmm15 #409.9 | |
pand %xmm6, %xmm12 #409.9 | |
movdqa %xmm3, %xmm14 #409.9 | |
paddd %xmm0, %xmm10 #409.9 | |
pxor %xmm12, %xmm14 #409.9 | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
movdqa %xmm5, %xmm12 #410.14 | |
pxor %xmm4, %xmm14 #409.9 | |
movdqa %xmm5, %xmm4 #410.14 | |
psrld $17, %xmm4 #410.14 | |
pslld $15, %xmm12 #410.14 | |
paddd %xmm14, %xmm15 #409.9 | |
por %xmm12, %xmm4 #410.14 | |
movdqa %xmm5, %xmm14 #410.14 | |
movdqa %xmm5, %xmm12 #410.14 | |
psrld $19, %xmm14 #410.14 | |
pslld $13, %xmm12 #410.14 | |
por %xmm12, %xmm14 #410.14 | |
paddd %xmm10, %xmm11 #409.9 | |
movdqa 3904(%rsp), %xmm12 #410.14 | |
paddd %xmm15, %xmm10 #409.9 | |
movdqa %xmm5, 4368(%rsp) #406.14 | |
pxor %xmm14, %xmm4 #410.14 | |
psrld $10, %xmm5 #410.14 | |
movdqa %xmm12, %xmm15 #410.14 | |
movdqa %xmm12, %xmm14 #410.14 | |
pxor %xmm5, %xmm4 #410.14 | |
movdqa 4192(%rsp), %xmm5 #410.14 | |
psrld $7, %xmm15 #410.14 | |
pslld $25, %xmm14 #410.14 | |
paddd %xmm4, %xmm5 #410.14 | |
por %xmm14, %xmm15 #410.14 | |
movdqa %xmm12, %xmm4 #410.14 | |
movdqa %xmm12, %xmm14 #410.14 | |
psrld $18, %xmm4 #410.14 | |
pslld $14, %xmm14 #410.14 | |
psrld $3, %xmm12 #410.14 | |
por %xmm14, %xmm4 #410.14 | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 39, w7); | |
movdqa %xmm11, %xmm14 #411.9 | |
pxor %xmm4, %xmm15 #410.14 | |
movdqa %xmm11, %xmm4 #411.9 | |
pxor %xmm12, %xmm15 #410.14 | |
movdqa %xmm11, %xmm12 #411.9 | |
paddd %xmm15, %xmm13 #410.14 | |
psrld $6, %xmm4 #411.9 | |
pslld $26, %xmm12 #411.9 | |
movdqa %xmm11, %xmm15 #411.9 | |
paddd %xmm13, %xmm5 #410.14 | |
por %xmm12, %xmm4 #411.9 | |
psrld $11, %xmm14 #411.9 | |
pslld $21, %xmm15 #411.9 | |
movdqa %xmm11, %xmm12 #411.9 | |
movdqa %xmm11, %xmm13 #411.9 | |
por %xmm15, %xmm14 #411.9 | |
psrld $25, %xmm12 #411.9 | |
pslld $7, %xmm13 #411.9 | |
pxor %xmm14, %xmm4 #411.9 | |
por %xmm13, %xmm12 #411.9 | |
pxor %xmm12, %xmm4 #411.9 | |
paddd %xmm4, %xmm7 #411.9 | |
movdqa %xmm11, %xmm4 #411.9 | |
movdqa %xmm11, 4432(%rsp) #409.9 | |
pand %xmm9, %xmm4 #411.9 | |
pandn %xmm2, %xmm11 #411.9 | |
movdqa %xmm2, 4336(%rsp) #405.9 | |
pxor %xmm11, %xmm4 #411.9 | |
movdqa .L_2il0floatpacket.6784(%rip), %xmm2 #411.9 | |
movdqa %xmm10, %xmm11 #411.9 | |
paddd %xmm4, %xmm2 #411.9 | |
movdqa %xmm10, %xmm4 #411.9 | |
paddd %xmm2, %xmm7 #411.9 | |
movdqa %xmm10, %xmm2 #411.9 | |
paddd %xmm5, %xmm7 #411.9 | |
pand %xmm8, %xmm2 #411.9 | |
paddd %xmm7, %xmm6 #411.9 | |
pslld $30, %xmm4 #411.9 | |
movdqa %xmm9, 4384(%rsp) #407.9 | |
movdqa %xmm10, %xmm9 #411.9 | |
movdqa %xmm5, 4464(%rsp) #410.14 | |
movdqa %xmm10, %xmm5 #411.9 | |
movdqa %xmm6, 4480(%rsp) #411.9 | |
movdqa %xmm10, %xmm6 #411.9 | |
movdqa %xmm8, 4400(%rsp) #407.9 | |
psrld $2, %xmm9 #411.9 | |
psrld $13, %xmm5 #411.9 | |
pslld $19, %xmm6 #411.9 | |
movdqa %xmm10, %xmm8 #411.9 | |
por %xmm4, %xmm9 #411.9 | |
movdqa %xmm10, 4448(%rsp) #409.9 | |
por %xmm6, %xmm5 #411.9 | |
psrld $22, %xmm8 #411.9 | |
pslld $10, %xmm11 #411.9 | |
pand %xmm1, %xmm10 #411.9 | |
pxor %xmm5, %xmm9 #411.9 | |
movdqa %xmm2, 4496(%rsp) #411.9 | |
por %xmm11, %xmm8 #411.9 | |
pxor %xmm10, %xmm2 #411.9 | |
pxor %xmm8, %xmm9 #411.9 | |
movdqa %xmm1, 4352(%rsp) #405.9 | |
pxor %xmm3, %xmm2 #411.9 | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
movdqa %xmm0, %xmm1 #412.14 | |
movdqa %xmm0, %xmm3 #412.14 | |
paddd %xmm2, %xmm9 #411.9 | |
psrld $17, %xmm1 #412.14 | |
pslld $15, %xmm3 #412.14 | |
paddd %xmm9, %xmm7 #411.9 | |
movdqa %xmm0, 4416(%rsp) #408.14 | |
por %xmm3, %xmm1 #412.14 | |
psrld $19, %xmm0 #412.14 | |
movdqa %xmm7, 4512(%rsp) #411.9 | |
movdqa %xmm1, 4528(%rsp) #412.14 | |
movdqa %xmm0, 4544(%rsp) #412.14 | |
# LOE | |
..B2.10: # Preds ..B2.11 | |
movdqa 3984(%rsp), %xmm14 #412.14 | |
movdqa %xmm14, %xmm1 #412.14 | |
movdqa %xmm14, %xmm10 #412.14 | |
movdqa %xmm14, %xmm8 #412.14 | |
movdqa %xmm14, %xmm6 #412.14 | |
psrld $7, %xmm1 #412.14 | |
pslld $25, %xmm10 #412.14 | |
psrld $18, %xmm8 #412.14 | |
pslld $14, %xmm6 #412.14 | |
movdqa 4416(%rsp), %xmm7 #412.14 | |
por %xmm10, %xmm1 #412.14 | |
por %xmm6, %xmm8 #412.14 | |
movdqa %xmm14, %xmm5 #412.14 | |
movdqa %xmm7, %xmm2 #412.14 | |
pxor %xmm8, %xmm1 #412.14 | |
psrld $3, %xmm5 #412.14 | |
pslld $13, %xmm2 #412.14 | |
movdqa 4544(%rsp), %xmm0 #412.14 | |
pxor %xmm5, %xmm1 #412.14 | |
movdqa 3904(%rsp), %xmm9 #412.14 | |
por %xmm2, %xmm0 #412.14 | |
movdqa 4528(%rsp), %xmm3 #412.14 | |
paddd %xmm1, %xmm9 #412.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 40, w8); | |
movdqa 4480(%rsp), %xmm1 #413.9 | |
pxor %xmm0, %xmm3 #412.14 | |
psrld $10, %xmm7 #412.14 | |
movdqa %xmm1, %xmm11 #413.9 | |
movdqa %xmm1, %xmm13 #413.9 | |
movdqa %xmm1, %xmm12 #413.9 | |
movdqa %xmm1, %xmm4 #413.9 | |
pxor %xmm7, %xmm3 #412.14 | |
movdqa 4272(%rsp), %xmm0 #412.14 | |
psrld $6, %xmm11 #413.9 | |
pslld $26, %xmm13 #413.9 | |
psrld $11, %xmm12 #413.9 | |
pslld $21, %xmm4 #413.9 | |
movdqa %xmm1, %xmm15 #413.9 | |
movdqa %xmm1, %xmm7 #413.9 | |
paddd %xmm3, %xmm0 #412.14 | |
por %xmm13, %xmm11 #413.9 | |
por %xmm4, %xmm12 #413.9 | |
psrld $25, %xmm15 #413.9 | |
pslld $7, %xmm7 #413.9 | |
movdqa %xmm0, %xmm3 #412.14 | |
pxor %xmm12, %xmm11 #413.9 | |
por %xmm7, %xmm15 #413.9 | |
paddd %xmm9, %xmm3 #412.14 | |
movdqa 4336(%rsp), %xmm9 #413.9 | |
pxor %xmm15, %xmm11 #413.9 | |
movdqa 4432(%rsp), %xmm6 #413.9 | |
paddd %xmm11, %xmm9 #413.9 | |
movdqa 4384(%rsp), %xmm11 #413.9 | |
movdqa %xmm1, %xmm2 #413.9 | |
movdqa %xmm1, %xmm10 #413.9 | |
pand %xmm6, %xmm2 #413.9 | |
pandn %xmm11, %xmm10 #413.9 | |
movdqa .L_2il0floatpacket.6785(%rip), %xmm8 #413.9 | |
pxor %xmm10, %xmm2 #413.9 | |
paddd %xmm2, %xmm8 #413.9 | |
movdqa 4512(%rsp), %xmm2 #413.9 | |
paddd %xmm8, %xmm9 #413.9 | |
movdqa %xmm2, %xmm5 #413.9 | |
movdqa %xmm2, %xmm13 #413.9 | |
psrld $2, %xmm5 #413.9 | |
pslld $30, %xmm13 #413.9 | |
movdqa %xmm2, %xmm12 #413.9 | |
movdqa %xmm2, %xmm4 #413.9 | |
por %xmm13, %xmm5 #413.9 | |
psrld $13, %xmm12 #413.9 | |
pslld $19, %xmm4 #413.9 | |
movdqa %xmm2, %xmm10 #413.9 | |
movdqa %xmm2, %xmm13 #413.9 | |
por %xmm4, %xmm12 #413.9 | |
psrld $22, %xmm10 #413.9 | |
pslld $10, %xmm13 #413.9 | |
movdqa 4448(%rsp), %xmm7 #413.9 | |
movdqa %xmm2, %xmm0 #413.9 | |
pxor %xmm12, %xmm5 #413.9 | |
por %xmm13, %xmm10 #413.9 | |
pand %xmm7, %xmm0 #413.9 | |
pxor %xmm10, %xmm5 #413.9 | |
movdqa 4400(%rsp), %xmm10 #413.9 | |
movdqa %xmm2, %xmm15 #413.9 | |
pand %xmm10, %xmm15 #413.9 | |
movdqa %xmm0, %xmm13 #413.9 | |
movdqa 4496(%rsp), %xmm12 #413.9 | |
pxor %xmm15, %xmm13 #413.9 | |
pxor %xmm13, %xmm12 #413.9 | |
paddd %xmm3, %xmm9 #413.9 | |
movdqa 4352(%rsp), %xmm8 #413.9 | |
paddd %xmm12, %xmm5 #413.9 | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
movdqa 4464(%rsp), %xmm13 #414.14 | |
paddd %xmm9, %xmm8 #413.9 | |
paddd %xmm5, %xmm9 #413.9 | |
movdqa %xmm13, %xmm5 #414.14 | |
movdqa %xmm13, %xmm4 #414.14 | |
movdqa %xmm13, %xmm12 #414.14 | |
movdqa %xmm13, %xmm15 #414.14 | |
psrld $17, %xmm5 #414.14 | |
pslld $15, %xmm4 #414.14 | |
psrld $19, %xmm12 #414.14 | |
pslld $13, %xmm15 #414.14 | |
por %xmm4, %xmm5 #414.14 | |
por %xmm15, %xmm12 #414.14 | |
psrld $10, %xmm13 #414.14 | |
pxor %xmm12, %xmm5 #414.14 | |
movdqa 4288(%rsp), %xmm15 #414.14 | |
pxor %xmm13, %xmm5 #414.14 | |
paddd %xmm5, %xmm15 #414.14 | |
movdqa 4000(%rsp), %xmm5 #414.14 | |
movdqa %xmm5, %xmm13 #414.14 | |
movdqa %xmm5, %xmm4 #414.14 | |
psrld $7, %xmm13 #414.14 | |
pslld $25, %xmm4 #414.14 | |
por %xmm4, %xmm13 #414.14 | |
movdqa %xmm5, %xmm4 #414.14 | |
movdqa %xmm5, %xmm12 #414.14 | |
psrld $18, %xmm4 #414.14 | |
pslld $14, %xmm12 #414.14 | |
por %xmm12, %xmm4 #414.14 | |
movdqa %xmm5, %xmm12 #414.14 | |
pxor %xmm4, %xmm13 #414.14 | |
psrld $3, %xmm12 #414.14 | |
pxor %xmm12, %xmm13 #414.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 41, w9); | |
movdqa %xmm8, %xmm4 #415.9 | |
paddd %xmm13, %xmm14 #414.14 | |
movdqa %xmm8, %xmm13 #415.9 | |
paddd %xmm14, %xmm15 #414.14 | |
movdqa %xmm8, %xmm14 #415.9 | |
movdqa %xmm8, %xmm12 #415.9 | |
psrld $6, %xmm14 #415.9 | |
pslld $26, %xmm4 #415.9 | |
psrld $11, %xmm13 #415.9 | |
pslld $21, %xmm12 #415.9 | |
por %xmm4, %xmm14 #415.9 | |
por %xmm12, %xmm13 #415.9 | |
movdqa %xmm8, %xmm4 #415.9 | |
pxor %xmm13, %xmm14 #415.9 | |
movdqa %xmm8, %xmm13 #415.9 | |
psrld $25, %xmm4 #415.9 | |
pslld $7, %xmm13 #415.9 | |
por %xmm13, %xmm4 #415.9 | |
movdqa %xmm1, %xmm13 #415.9 | |
movdqa %xmm8, %xmm12 #415.9 | |
pand %xmm8, %xmm13 #415.9 | |
pandn %xmm6, %xmm12 #415.9 | |
pxor %xmm4, %xmm14 #415.9 | |
movdqa .L_2il0floatpacket.6786(%rip), %xmm4 #415.9 | |
pxor %xmm12, %xmm13 #415.9 | |
paddd %xmm14, %xmm11 #415.9 | |
paddd %xmm13, %xmm4 #415.9 | |
paddd %xmm4, %xmm11 #415.9 | |
movdqa %xmm9, %xmm4 #415.9 | |
movdqa %xmm9, %xmm12 #415.9 | |
psrld $2, %xmm4 #415.9 | |
pslld $30, %xmm12 #415.9 | |
movdqa %xmm9, %xmm13 #415.9 | |
por %xmm12, %xmm4 #415.9 | |
movdqa %xmm9, %xmm12 #415.9 | |
psrld $13, %xmm13 #415.9 | |
pslld $19, %xmm12 #415.9 | |
por %xmm12, %xmm13 #415.9 | |
movdqa %xmm9, %xmm12 #415.9 | |
pxor %xmm13, %xmm4 #415.9 | |
movdqa %xmm9, %xmm13 #415.9 | |
movdqa %xmm2, %xmm14 #415.9 | |
psrld $22, %xmm12 #415.9 | |
pslld $10, %xmm13 #415.9 | |
pand %xmm9, %xmm14 #415.9 | |
por %xmm13, %xmm12 #415.9 | |
movdqa %xmm7, %xmm13 #415.9 | |
pxor %xmm12, %xmm4 #415.9 | |
pand %xmm9, %xmm13 #415.9 | |
movdqa %xmm14, %xmm12 #415.9 | |
paddd %xmm15, %xmm11 #415.9 | |
pxor %xmm13, %xmm12 #415.9 | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
movdqa %xmm3, %xmm13 #416.15 | |
pxor %xmm0, %xmm12 #415.9 | |
movdqa %xmm3, %xmm0 #416.15 | |
psrld $17, %xmm0 #416.15 | |
pslld $15, %xmm13 #416.15 | |
paddd %xmm12, %xmm4 #415.9 | |
por %xmm13, %xmm0 #416.15 | |
movdqa %xmm3, %xmm12 #416.15 | |
movdqa %xmm3, %xmm13 #416.15 | |
psrld $19, %xmm12 #416.15 | |
pslld $13, %xmm13 #416.15 | |
por %xmm13, %xmm12 #416.15 | |
paddd %xmm11, %xmm10 #415.9 | |
movdqa %xmm3, 4560(%rsp) #412.14 | |
pxor %xmm12, %xmm0 #416.15 | |
movdqa 4016(%rsp), %xmm13 #416.15 | |
psrld $10, %xmm3 #416.15 | |
paddd %xmm4, %xmm11 #415.9 | |
pxor %xmm3, %xmm0 #416.15 | |
movdqa %xmm13, %xmm4 #416.15 | |
movdqa %xmm13, %xmm3 #416.15 | |
movdqa 4304(%rsp), %xmm12 #416.15 | |
psrld $7, %xmm4 #416.15 | |
pslld $25, %xmm3 #416.15 | |
paddd %xmm0, %xmm12 #416.15 | |
por %xmm3, %xmm4 #416.15 | |
movdqa %xmm13, %xmm0 #416.15 | |
movdqa %xmm13, %xmm3 #416.15 | |
psrld $18, %xmm0 #416.15 | |
pslld $14, %xmm3 #416.15 | |
por %xmm3, %xmm0 #416.15 | |
movdqa %xmm13, %xmm3 #416.15 | |
pxor %xmm0, %xmm4 #416.15 | |
psrld $3, %xmm3 #416.15 | |
pxor %xmm3, %xmm4 #416.15 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 42, w10); | |
movdqa %xmm10, %xmm0 #417.9 | |
paddd %xmm4, %xmm5 #416.15 | |
movdqa %xmm10, %xmm3 #417.9 | |
paddd %xmm5, %xmm12 #416.15 | |
movdqa %xmm10, %xmm5 #417.9 | |
movdqa %xmm10, %xmm4 #417.9 | |
psrld $6, %xmm5 #417.9 | |
pslld $26, %xmm0 #417.9 | |
psrld $11, %xmm3 #417.9 | |
pslld $21, %xmm4 #417.9 | |
por %xmm0, %xmm5 #417.9 | |
por %xmm4, %xmm3 #417.9 | |
movdqa %xmm10, %xmm0 #417.9 | |
pxor %xmm3, %xmm5 #417.9 | |
movdqa %xmm10, %xmm3 #417.9 | |
psrld $25, %xmm0 #417.9 | |
pslld $7, %xmm3 #417.9 | |
por %xmm3, %xmm0 #417.9 | |
movdqa %xmm10, %xmm3 #417.9 | |
pxor %xmm0, %xmm5 #417.9 | |
movdqa %xmm10, %xmm0 #417.9 | |
pand %xmm8, %xmm0 #417.9 | |
pandn %xmm1, %xmm3 #417.9 | |
movdqa .L_2il0floatpacket.6787(%rip), %xmm4 #417.9 | |
pxor %xmm3, %xmm0 #417.9 | |
paddd %xmm0, %xmm4 #417.9 | |
movdqa %xmm11, %xmm3 #417.9 | |
movdqa %xmm11, %xmm0 #417.9 | |
paddd %xmm5, %xmm6 #417.9 | |
psrld $2, %xmm3 #417.9 | |
pslld $30, %xmm0 #417.9 | |
paddd %xmm4, %xmm6 #417.9 | |
por %xmm0, %xmm3 #417.9 | |
movdqa %xmm11, %xmm0 #417.9 | |
movdqa %xmm11, %xmm4 #417.9 | |
psrld $13, %xmm0 #417.9 | |
pslld $19, %xmm4 #417.9 | |
por %xmm4, %xmm0 #417.9 | |
movdqa %xmm11, %xmm4 #417.9 | |
pxor %xmm0, %xmm3 #417.9 | |
movdqa %xmm11, %xmm0 #417.9 | |
movdqa %xmm11, %xmm5 #417.9 | |
psrld $22, %xmm0 #417.9 | |
pslld $10, %xmm4 #417.9 | |
pand %xmm9, %xmm5 #417.9 | |
por %xmm4, %xmm0 #417.9 | |
movdqa %xmm2, %xmm4 #417.9 | |
pxor %xmm0, %xmm3 #417.9 | |
pand %xmm11, %xmm4 #417.9 | |
movdqa %xmm5, %xmm0 #417.9 | |
paddd %xmm12, %xmm6 #417.9 | |
pxor %xmm4, %xmm0 #417.9 | |
paddd %xmm6, %xmm7 #417.9 | |
pxor %xmm14, %xmm0 #417.9 | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
movdqa %xmm15, %xmm14 #418.15 | |
paddd %xmm0, %xmm3 #417.9 | |
movdqa %xmm15, %xmm0 #418.15 | |
paddd %xmm3, %xmm6 #417.9 | |
movdqa %xmm15, %xmm3 #418.15 | |
movdqa %xmm15, %xmm4 #418.15 | |
psrld $17, %xmm0 #418.15 | |
pslld $15, %xmm14 #418.15 | |
psrld $19, %xmm3 #418.15 | |
pslld $13, %xmm4 #418.15 | |
por %xmm14, %xmm0 #418.15 | |
por %xmm4, %xmm3 #418.15 | |
movdqa %xmm15, 4576(%rsp) #414.14 | |
pxor %xmm3, %xmm0 #418.15 | |
movdqa 4032(%rsp), %xmm4 #418.15 | |
psrld $10, %xmm15 #418.15 | |
pxor %xmm15, %xmm0 #418.15 | |
movdqa %xmm4, %xmm15 #418.15 | |
movdqa %xmm4, %xmm14 #418.15 | |
psrld $7, %xmm15 #418.15 | |
movdqa 4320(%rsp), %xmm3 #418.15 | |
pslld $25, %xmm14 #418.15 | |
paddd %xmm0, %xmm3 #418.15 | |
por %xmm14, %xmm15 #418.15 | |
movdqa %xmm4, %xmm0 #418.15 | |
movdqa %xmm4, %xmm14 #418.15 | |
psrld $18, %xmm0 #418.15 | |
pslld $14, %xmm14 #418.15 | |
por %xmm14, %xmm0 #418.15 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 43, w11); | |
movdqa %xmm7, %xmm14 #419.9 | |
pxor %xmm0, %xmm15 #418.15 | |
movdqa %xmm4, %xmm0 #418.15 | |
psrld $3, %xmm0 #418.15 | |
psrld $11, %xmm14 #419.9 | |
pxor %xmm0, %xmm15 #418.15 | |
movdqa %xmm7, %xmm0 #419.9 | |
paddd %xmm15, %xmm13 #418.15 | |
movdqa %xmm7, %xmm15 #419.9 | |
paddd %xmm13, %xmm3 #418.15 | |
movdqa %xmm7, %xmm13 #419.9 | |
psrld $6, %xmm0 #419.9 | |
pslld $26, %xmm15 #419.9 | |
pslld $21, %xmm13 #419.9 | |
por %xmm15, %xmm0 #419.9 | |
por %xmm13, %xmm14 #419.9 | |
movdqa %xmm7, %xmm15 #419.9 | |
movdqa %xmm7, %xmm13 #419.9 | |
psrld $25, %xmm15 #419.9 | |
pslld $7, %xmm13 #419.9 | |
pxor %xmm14, %xmm0 #419.9 | |
por %xmm13, %xmm15 #419.9 | |
movdqa %xmm7, %xmm13 #419.9 | |
pxor %xmm15, %xmm0 #419.9 | |
movdqa %xmm7, %xmm15 #419.9 | |
pand %xmm10, %xmm13 #419.9 | |
pandn %xmm8, %xmm15 #419.9 | |
movdqa .L_2il0floatpacket.6788(%rip), %xmm14 #419.9 | |
pxor %xmm15, %xmm13 #419.9 | |
paddd %xmm13, %xmm14 #419.9 | |
movdqa %xmm6, %xmm13 #419.9 | |
movdqa %xmm6, %xmm15 #419.9 | |
paddd %xmm0, %xmm1 #419.9 | |
psrld $2, %xmm13 #419.9 | |
pslld $30, %xmm15 #419.9 | |
paddd %xmm14, %xmm1 #419.9 | |
por %xmm15, %xmm13 #419.9 | |
movdqa %xmm6, %xmm15 #419.9 | |
movdqa %xmm6, %xmm14 #419.9 | |
psrld $13, %xmm15 #419.9 | |
pslld $19, %xmm14 #419.9 | |
por %xmm14, %xmm15 #419.9 | |
movdqa %xmm6, %xmm14 #419.9 | |
pxor %xmm15, %xmm13 #419.9 | |
movdqa %xmm6, %xmm15 #419.9 | |
movdqa %xmm6, %xmm0 #419.9 | |
psrld $22, %xmm14 #419.9 | |
pslld $10, %xmm15 #419.9 | |
pand %xmm11, %xmm0 #419.9 | |
por %xmm15, %xmm14 #419.9 | |
movdqa %xmm6, %xmm15 #419.9 | |
pxor %xmm14, %xmm13 #419.9 | |
pand %xmm9, %xmm15 #419.9 | |
movdqa %xmm0, %xmm14 #419.9 | |
paddd %xmm3, %xmm1 #419.9 | |
pxor %xmm15, %xmm14 #419.9 | |
paddd %xmm1, %xmm2 #419.9 | |
pxor %xmm5, %xmm14 #419.9 | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
movdqa %xmm12, %xmm5 #420.15 | |
paddd %xmm14, %xmm13 #419.9 | |
movdqa %xmm12, %xmm15 #420.15 | |
paddd %xmm13, %xmm1 #419.9 | |
movdqa %xmm12, %xmm13 #420.15 | |
movdqa %xmm12, %xmm14 #420.15 | |
psrld $17, %xmm13 #420.15 | |
pslld $15, %xmm5 #420.15 | |
psrld $19, %xmm15 #420.15 | |
pslld $13, %xmm14 #420.15 | |
por %xmm5, %xmm13 #420.15 | |
por %xmm14, %xmm15 #420.15 | |
movdqa %xmm12, 4592(%rsp) #416.15 | |
pxor %xmm15, %xmm13 #420.15 | |
psrld $10, %xmm12 #420.15 | |
pxor %xmm12, %xmm13 #420.15 | |
movdqa 4048(%rsp), %xmm12 #420.15 | |
movdqa %xmm12, %xmm15 #420.15 | |
movdqa %xmm12, %xmm14 #420.15 | |
movdqa 4368(%rsp), %xmm5 #420.15 | |
psrld $7, %xmm15 #420.15 | |
pslld $25, %xmm14 #420.15 | |
paddd %xmm13, %xmm5 #420.15 | |
por %xmm14, %xmm15 #420.15 | |
movdqa %xmm12, %xmm13 #420.15 | |
movdqa %xmm12, %xmm14 #420.15 | |
psrld $18, %xmm13 #420.15 | |
pslld $14, %xmm14 #420.15 | |
por %xmm14, %xmm13 #420.15 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 44, w12); | |
movdqa %xmm2, %xmm14 #421.9 | |
pxor %xmm13, %xmm15 #420.15 | |
movdqa %xmm12, %xmm13 #420.15 | |
psrld $3, %xmm13 #420.15 | |
pslld $26, %xmm14 #421.9 | |
pxor %xmm13, %xmm15 #420.15 | |
movdqa %xmm2, %xmm13 #421.9 | |
paddd %xmm15, %xmm4 #420.15 | |
movdqa %xmm2, %xmm15 #421.9 | |
paddd %xmm4, %xmm5 #420.15 | |
movdqa %xmm2, %xmm4 #421.9 | |
psrld $6, %xmm4 #421.9 | |
psrld $11, %xmm13 #421.9 | |
pslld $21, %xmm15 #421.9 | |
por %xmm14, %xmm4 #421.9 | |
por %xmm15, %xmm13 #421.9 | |
movdqa %xmm2, %xmm14 #421.9 | |
pxor %xmm13, %xmm4 #421.9 | |
movdqa %xmm2, %xmm13 #421.9 | |
psrld $25, %xmm14 #421.9 | |
pslld $7, %xmm13 #421.9 | |
por %xmm13, %xmm14 #421.9 | |
movdqa %xmm2, %xmm13 #421.9 | |
movdqa %xmm2, %xmm15 #421.9 | |
pand %xmm7, %xmm13 #421.9 | |
pandn %xmm10, %xmm15 #421.9 | |
pxor %xmm14, %xmm4 #421.9 | |
movdqa .L_2il0floatpacket.6789(%rip), %xmm14 #421.9 | |
pxor %xmm15, %xmm13 #421.9 | |
paddd %xmm13, %xmm14 #421.9 | |
movdqa %xmm1, %xmm15 #421.9 | |
movdqa %xmm1, %xmm13 #421.9 | |
paddd %xmm4, %xmm8 #421.9 | |
psrld $2, %xmm15 #421.9 | |
pslld $30, %xmm13 #421.9 | |
paddd %xmm14, %xmm8 #421.9 | |
por %xmm13, %xmm15 #421.9 | |
movdqa %xmm1, %xmm13 #421.9 | |
movdqa %xmm1, %xmm14 #421.9 | |
psrld $13, %xmm13 #421.9 | |
pslld $19, %xmm14 #421.9 | |
por %xmm14, %xmm13 #421.9 | |
movdqa %xmm1, %xmm14 #421.9 | |
pxor %xmm13, %xmm15 #421.9 | |
movdqa %xmm1, %xmm13 #421.9 | |
movdqa %xmm1, %xmm4 #421.9 | |
psrld $22, %xmm13 #421.9 | |
pslld $10, %xmm14 #421.9 | |
pand %xmm6, %xmm4 #421.9 | |
por %xmm14, %xmm13 #421.9 | |
movdqa %xmm1, %xmm14 #421.9 | |
pxor %xmm13, %xmm15 #421.9 | |
pand %xmm11, %xmm14 #421.9 | |
movdqa %xmm4, %xmm13 #421.9 | |
paddd %xmm5, %xmm8 #421.9 | |
pxor %xmm14, %xmm13 #421.9 | |
paddd %xmm8, %xmm9 #421.9 | |
pxor %xmm0, %xmm13 #421.9 | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
movdqa %xmm3, %xmm0 #422.15 | |
paddd %xmm13, %xmm15 #421.9 | |
movdqa %xmm3, %xmm13 #422.15 | |
paddd %xmm15, %xmm8 #421.9 | |
movdqa %xmm3, %xmm15 #422.15 | |
movdqa %xmm3, %xmm14 #422.15 | |
psrld $17, %xmm13 #422.15 | |
pslld $15, %xmm0 #422.15 | |
psrld $19, %xmm15 #422.15 | |
pslld $13, %xmm14 #422.15 | |
por %xmm0, %xmm13 #422.15 | |
por %xmm14, %xmm15 #422.15 | |
movdqa %xmm3, 4608(%rsp) #418.15 | |
pxor %xmm15, %xmm13 #422.15 | |
psrld $10, %xmm3 #422.15 | |
movdqa 4416(%rsp), %xmm0 #422.15 | |
pxor %xmm3, %xmm13 #422.15 | |
paddd %xmm13, %xmm0 #422.15 | |
movdqa 4096(%rsp), %xmm13 #422.15 | |
movdqa %xmm13, %xmm15 #422.15 | |
movdqa %xmm13, %xmm14 #422.15 | |
psrld $7, %xmm15 #422.15 | |
pslld $25, %xmm14 #422.15 | |
por %xmm14, %xmm15 #422.15 | |
movdqa %xmm13, %xmm3 #422.15 | |
movdqa %xmm13, %xmm14 #422.15 | |
psrld $18, %xmm3 #422.15 | |
pslld $14, %xmm14 #422.15 | |
por %xmm14, %xmm3 #422.15 | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 45, w13); | |
movdqa %xmm9, %xmm14 #423.9 | |
pxor %xmm3, %xmm15 #422.15 | |
movdqa %xmm13, %xmm3 #422.15 | |
psrld $3, %xmm3 #422.15 | |
psrld $11, %xmm14 #423.9 | |
pxor %xmm3, %xmm15 #422.15 | |
movdqa %xmm9, %xmm3 #423.9 | |
paddd %xmm15, %xmm12 #422.15 | |
movdqa %xmm9, %xmm15 #423.9 | |
paddd %xmm12, %xmm0 #422.15 | |
movdqa %xmm9, %xmm12 #423.9 | |
psrld $6, %xmm3 #423.9 | |
pslld $26, %xmm15 #423.9 | |
pslld $21, %xmm12 #423.9 | |
por %xmm15, %xmm3 #423.9 | |
por %xmm12, %xmm14 #423.9 | |
movdqa %xmm9, %xmm15 #423.9 | |
movdqa %xmm9, %xmm12 #423.9 | |
psrld $25, %xmm15 #423.9 | |
pslld $7, %xmm12 #423.9 | |
pxor %xmm14, %xmm3 #423.9 | |
por %xmm12, %xmm15 #423.9 | |
movdqa %xmm9, %xmm12 #423.9 | |
pxor %xmm15, %xmm3 #423.9 | |
movdqa %xmm9, %xmm15 #423.9 | |
pand %xmm2, %xmm12 #423.9 | |
pandn %xmm7, %xmm15 #423.9 | |
movdqa .L_2il0floatpacket.6790(%rip), %xmm14 #423.9 | |
pxor %xmm15, %xmm12 #423.9 | |
paddd %xmm12, %xmm14 #423.9 | |
movdqa %xmm8, %xmm15 #423.9 | |
movdqa %xmm8, %xmm12 #423.9 | |
paddd %xmm3, %xmm10 #423.9 | |
psrld $2, %xmm15 #423.9 | |
pslld $30, %xmm12 #423.9 | |
paddd %xmm14, %xmm10 #423.9 | |
por %xmm12, %xmm15 #423.9 | |
movdqa %xmm8, %xmm12 #423.9 | |
movdqa %xmm8, %xmm14 #423.9 | |
psrld $13, %xmm12 #423.9 | |
pslld $19, %xmm14 #423.9 | |
por %xmm14, %xmm12 #423.9 | |
movdqa %xmm8, %xmm14 #423.9 | |
pxor %xmm12, %xmm15 #423.9 | |
movdqa %xmm8, %xmm12 #423.9 | |
movdqa %xmm8, %xmm3 #423.9 | |
psrld $22, %xmm14 #423.9 | |
pslld $10, %xmm12 #423.9 | |
pand %xmm1, %xmm3 #423.9 | |
por %xmm12, %xmm14 #423.9 | |
movdqa %xmm8, %xmm12 #423.9 | |
pxor %xmm14, %xmm15 #423.9 | |
pand %xmm6, %xmm12 #423.9 | |
movdqa %xmm3, %xmm14 #423.9 | |
paddd %xmm0, %xmm10 #423.9 | |
pxor %xmm12, %xmm14 #423.9 | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
movdqa %xmm5, %xmm12 #424.15 | |
pxor %xmm4, %xmm14 #423.9 | |
movdqa %xmm5, %xmm4 #424.15 | |
psrld $17, %xmm4 #424.15 | |
pslld $15, %xmm12 #424.15 | |
paddd %xmm14, %xmm15 #423.9 | |
por %xmm12, %xmm4 #424.15 | |
movdqa %xmm5, %xmm14 #424.15 | |
movdqa %xmm5, %xmm12 #424.15 | |
psrld $19, %xmm14 #424.15 | |
pslld $13, %xmm12 #424.15 | |
por %xmm12, %xmm14 #424.15 | |
paddd %xmm10, %xmm11 #423.9 | |
movdqa 4144(%rsp), %xmm12 #424.15 | |
paddd %xmm15, %xmm10 #423.9 | |
movdqa %xmm5, 4656(%rsp) #420.15 | |
pxor %xmm14, %xmm4 #424.15 | |
psrld $10, %xmm5 #424.15 | |
movdqa %xmm12, %xmm15 #424.15 | |
movdqa %xmm12, %xmm14 #424.15 | |
pxor %xmm5, %xmm4 #424.15 | |
movdqa 4464(%rsp), %xmm5 #424.15 | |
psrld $7, %xmm15 #424.15 | |
pslld $25, %xmm14 #424.15 | |
paddd %xmm4, %xmm5 #424.15 | |
por %xmm14, %xmm15 #424.15 | |
movdqa %xmm12, %xmm4 #424.15 | |
movdqa %xmm12, %xmm14 #424.15 | |
psrld $18, %xmm4 #424.15 | |
pslld $14, %xmm14 #424.15 | |
psrld $3, %xmm12 #424.15 | |
por %xmm14, %xmm4 #424.15 | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 46, w14); | |
movdqa %xmm11, %xmm14 #425.9 | |
pxor %xmm4, %xmm15 #424.15 | |
movdqa %xmm11, %xmm4 #425.9 | |
pxor %xmm12, %xmm15 #424.15 | |
movdqa %xmm11, %xmm12 #425.9 | |
paddd %xmm15, %xmm13 #424.15 | |
psrld $6, %xmm4 #425.9 | |
pslld $26, %xmm12 #425.9 | |
movdqa %xmm11, %xmm15 #425.9 | |
paddd %xmm13, %xmm5 #424.15 | |
por %xmm12, %xmm4 #425.9 | |
psrld $11, %xmm14 #425.9 | |
pslld $21, %xmm15 #425.9 | |
movdqa %xmm11, %xmm12 #425.9 | |
movdqa %xmm11, %xmm13 #425.9 | |
por %xmm15, %xmm14 #425.9 | |
psrld $25, %xmm12 #425.9 | |
pslld $7, %xmm13 #425.9 | |
pxor %xmm14, %xmm4 #425.9 | |
por %xmm13, %xmm12 #425.9 | |
pxor %xmm12, %xmm4 #425.9 | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
movdqa %xmm0, %xmm12 #426.15 | |
paddd %xmm4, %xmm7 #425.9 | |
movdqa %xmm11, %xmm4 #425.9 | |
movdqa %xmm11, 4720(%rsp) #423.9 | |
pand %xmm9, %xmm4 #425.9 | |
pandn %xmm2, %xmm11 #425.9 | |
psrld $19, %xmm12 #426.15 | |
movdqa %xmm2, 4624(%rsp) #419.9 | |
pxor %xmm11, %xmm4 #425.9 | |
movdqa .L_2il0floatpacket.6791(%rip), %xmm2 #425.9 | |
movdqa %xmm10, %xmm11 #425.9 | |
paddd %xmm4, %xmm2 #425.9 | |
movdqa %xmm10, %xmm4 #425.9 | |
paddd %xmm2, %xmm7 #425.9 | |
movdqa %xmm10, %xmm2 #425.9 | |
paddd %xmm5, %xmm7 #425.9 | |
pand %xmm8, %xmm2 #425.9 | |
paddd %xmm7, %xmm6 #425.9 | |
pslld $30, %xmm4 #425.9 | |
movdqa %xmm9, 4672(%rsp) #421.9 | |
movdqa %xmm10, %xmm9 #425.9 | |
movdqa %xmm5, 4752(%rsp) #424.15 | |
movdqa %xmm10, %xmm5 #425.9 | |
movdqa %xmm6, 4768(%rsp) #425.9 | |
movdqa %xmm10, %xmm6 #425.9 | |
movdqa %xmm8, 4688(%rsp) #421.9 | |
psrld $2, %xmm9 #425.9 | |
psrld $13, %xmm5 #425.9 | |
pslld $19, %xmm6 #425.9 | |
movdqa %xmm10, %xmm8 #425.9 | |
por %xmm4, %xmm9 #425.9 | |
movdqa %xmm10, 4736(%rsp) #423.9 | |
por %xmm6, %xmm5 #425.9 | |
psrld $22, %xmm8 #425.9 | |
pslld $10, %xmm11 #425.9 | |
pand %xmm1, %xmm10 #425.9 | |
pxor %xmm5, %xmm9 #425.9 | |
movdqa %xmm2, 4784(%rsp) #425.9 | |
por %xmm11, %xmm8 #425.9 | |
pxor %xmm10, %xmm2 #425.9 | |
pxor %xmm8, %xmm9 #425.9 | |
movdqa %xmm1, 4640(%rsp) #419.9 | |
pxor %xmm3, %xmm2 #425.9 | |
movdqa %xmm0, %xmm1 #426.15 | |
movdqa %xmm0, %xmm3 #426.15 | |
paddd %xmm2, %xmm9 #425.9 | |
psrld $17, %xmm1 #426.15 | |
pslld $15, %xmm3 #426.15 | |
paddd %xmm9, %xmm7 #425.9 | |
movdqa %xmm0, 4704(%rsp) #422.15 | |
por %xmm3, %xmm1 #426.15 | |
pslld $13, %xmm0 #426.15 | |
movdqa %xmm7, 4800(%rsp) #425.9 | |
movdqa %xmm1, 4816(%rsp) #426.15 | |
movdqa %xmm12, 4832(%rsp) #426.15 | |
movdqa %xmm0, 4848(%rsp) #426.15 | |
# LOE | |
..B2.9: # Preds ..B2.10 | |
movdqa 4192(%rsp), %xmm14 #426.15 | |
movdqa %xmm14, %xmm1 #426.15 | |
movdqa %xmm14, %xmm10 #426.15 | |
movdqa %xmm14, %xmm8 #426.15 | |
movdqa %xmm14, %xmm6 #426.15 | |
psrld $7, %xmm1 #426.15 | |
pslld $25, %xmm10 #426.15 | |
psrld $18, %xmm8 #426.15 | |
pslld $14, %xmm6 #426.15 | |
por %xmm10, %xmm1 #426.15 | |
por %xmm6, %xmm8 #426.15 | |
movdqa %xmm14, %xmm5 #426.15 | |
pxor %xmm8, %xmm1 #426.15 | |
movdqa 4832(%rsp), %xmm2 #426.15 | |
psrld $3, %xmm5 #426.15 | |
por 4848(%rsp), %xmm2 #426.15 | |
pxor %xmm5, %xmm1 #426.15 | |
movdqa 4816(%rsp), %xmm0 #426.15 | |
movdqa 4704(%rsp), %xmm7 #426.15 | |
pxor %xmm2, %xmm0 #426.15 | |
movdqa 4144(%rsp), %xmm9 #426.15 | |
psrld $10, %xmm7 #426.15 | |
paddd %xmm1, %xmm9 #426.15 | |
pxor %xmm7, %xmm0 #426.15 | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 47, w15); | |
movdqa 4768(%rsp), %xmm1 #427.9 | |
movdqa 4560(%rsp), %xmm3 #426.15 | |
movdqa %xmm1, %xmm11 #427.9 | |
movdqa %xmm1, %xmm13 #427.9 | |
movdqa %xmm1, %xmm12 #427.9 | |
movdqa %xmm1, %xmm4 #427.9 | |
paddd %xmm0, %xmm3 #426.15 | |
psrld $6, %xmm11 #427.9 | |
pslld $26, %xmm13 #427.9 | |
psrld $11, %xmm12 #427.9 | |
pslld $21, %xmm4 #427.9 | |
movdqa %xmm1, %xmm15 #427.9 | |
movdqa %xmm1, %xmm0 #427.9 | |
por %xmm13, %xmm11 #427.9 | |
por %xmm4, %xmm12 #427.9 | |
psrld $25, %xmm15 #427.9 | |
pslld $7, %xmm0 #427.9 | |
pxor %xmm12, %xmm11 #427.9 | |
por %xmm0, %xmm15 #427.9 | |
paddd %xmm9, %xmm3 #426.15 | |
pxor %xmm15, %xmm11 #427.9 | |
movdqa 4624(%rsp), %xmm9 #427.9 | |
movdqa %xmm1, %xmm7 #427.9 | |
movdqa 4720(%rsp), %xmm6 #427.9 | |
paddd %xmm11, %xmm9 #427.9 | |
movdqa 4672(%rsp), %xmm11 #427.9 | |
movdqa %xmm1, %xmm2 #427.9 | |
pand %xmm6, %xmm7 #427.9 | |
pandn %xmm11, %xmm2 #427.9 | |
pxor %xmm2, %xmm7 #427.9 | |
movdqa 4800(%rsp), %xmm2 #427.9 | |
movdqa .L_2il0floatpacket.6792(%rip), %xmm10 #427.9 | |
movdqa %xmm2, %xmm5 #427.9 | |
movdqa %xmm2, %xmm13 #427.9 | |
paddd %xmm7, %xmm10 #427.9 | |
psrld $2, %xmm5 #427.9 | |
pslld $30, %xmm13 #427.9 | |
movdqa %xmm2, %xmm12 #427.9 | |
movdqa %xmm2, %xmm4 #427.9 | |
paddd %xmm10, %xmm9 #427.9 | |
por %xmm13, %xmm5 #427.9 | |
psrld $13, %xmm12 #427.9 | |
pslld $19, %xmm4 #427.9 | |
movdqa %xmm2, %xmm10 #427.9 | |
movdqa %xmm2, %xmm13 #427.9 | |
por %xmm4, %xmm12 #427.9 | |
psrld $22, %xmm10 #427.9 | |
pslld $10, %xmm13 #427.9 | |
movdqa %xmm2, %xmm0 #427.9 | |
movdqa 4736(%rsp), %xmm7 #427.9 | |
pxor %xmm12, %xmm5 #427.9 | |
por %xmm13, %xmm10 #427.9 | |
pand %xmm7, %xmm0 #427.9 | |
pxor %xmm10, %xmm5 #427.9 | |
movdqa %xmm2, %xmm15 #427.9 | |
movdqa 4688(%rsp), %xmm10 #427.9 | |
movdqa %xmm0, %xmm13 #427.9 | |
pand %xmm10, %xmm15 #427.9 | |
paddd %xmm3, %xmm9 #427.9 | |
movdqa 4784(%rsp), %xmm12 #427.9 | |
pxor %xmm15, %xmm13 #427.9 | |
pxor %xmm13, %xmm12 #427.9 | |
movdqa 4640(%rsp), %xmm8 #427.9 | |
paddd %xmm12, %xmm5 #427.9 | |
### | |
### w0 = add4(SIGMA1_256(w14), w9, SIGMA0_256(w1), w0); | |
movdqa 4752(%rsp), %xmm13 #429.14 | |
paddd %xmm9, %xmm8 #427.9 | |
paddd %xmm5, %xmm9 #427.9 | |
movdqa %xmm13, %xmm5 #429.14 | |
movdqa %xmm13, %xmm4 #429.14 | |
movdqa %xmm13, %xmm12 #429.14 | |
movdqa %xmm13, %xmm15 #429.14 | |
psrld $17, %xmm5 #429.14 | |
pslld $15, %xmm4 #429.14 | |
psrld $19, %xmm12 #429.14 | |
pslld $13, %xmm15 #429.14 | |
por %xmm4, %xmm5 #429.14 | |
por %xmm15, %xmm12 #429.14 | |
psrld $10, %xmm13 #429.14 | |
pxor %xmm12, %xmm5 #429.14 | |
movdqa 4576(%rsp), %xmm15 #429.14 | |
pxor %xmm13, %xmm5 #429.14 | |
paddd %xmm5, %xmm15 #429.14 | |
movdqa 4272(%rsp), %xmm5 #429.14 | |
movdqa %xmm5, %xmm13 #429.14 | |
movdqa %xmm5, %xmm4 #429.14 | |
psrld $7, %xmm13 #429.14 | |
pslld $25, %xmm4 #429.14 | |
por %xmm4, %xmm13 #429.14 | |
movdqa %xmm5, %xmm4 #429.14 | |
movdqa %xmm5, %xmm12 #429.14 | |
psrld $18, %xmm4 #429.14 | |
pslld $14, %xmm12 #429.14 | |
por %xmm12, %xmm4 #429.14 | |
movdqa %xmm5, %xmm12 #429.14 | |
pxor %xmm4, %xmm13 #429.14 | |
psrld $3, %xmm12 #429.14 | |
pxor %xmm12, %xmm13 #429.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 48, w0); | |
movdqa %xmm8, %xmm4 #430.9 | |
paddd %xmm13, %xmm14 #429.14 | |
movdqa %xmm8, %xmm13 #430.9 | |
paddd %xmm14, %xmm15 #429.14 | |
movdqa %xmm8, %xmm14 #430.9 | |
movdqa %xmm8, %xmm12 #430.9 | |
psrld $6, %xmm14 #430.9 | |
pslld $26, %xmm4 #430.9 | |
psrld $11, %xmm13 #430.9 | |
pslld $21, %xmm12 #430.9 | |
por %xmm4, %xmm14 #430.9 | |
por %xmm12, %xmm13 #430.9 | |
movdqa %xmm8, %xmm4 #430.9 | |
pxor %xmm13, %xmm14 #430.9 | |
movdqa %xmm8, %xmm13 #430.9 | |
psrld $25, %xmm4 #430.9 | |
pslld $7, %xmm13 #430.9 | |
por %xmm13, %xmm4 #430.9 | |
movdqa %xmm1, %xmm13 #430.9 | |
movdqa %xmm8, %xmm12 #430.9 | |
pand %xmm8, %xmm13 #430.9 | |
pandn %xmm6, %xmm12 #430.9 | |
pxor %xmm4, %xmm14 #430.9 | |
movdqa .L_2il0floatpacket.6793(%rip), %xmm4 #430.9 | |
pxor %xmm12, %xmm13 #430.9 | |
paddd %xmm14, %xmm11 #430.9 | |
paddd %xmm13, %xmm4 #430.9 | |
paddd %xmm4, %xmm11 #430.9 | |
movdqa %xmm9, %xmm4 #430.9 | |
movdqa %xmm9, %xmm12 #430.9 | |
psrld $2, %xmm4 #430.9 | |
pslld $30, %xmm12 #430.9 | |
movdqa %xmm9, %xmm13 #430.9 | |
por %xmm12, %xmm4 #430.9 | |
movdqa %xmm9, %xmm12 #430.9 | |
psrld $13, %xmm13 #430.9 | |
pslld $19, %xmm12 #430.9 | |
por %xmm12, %xmm13 #430.9 | |
movdqa %xmm9, %xmm12 #430.9 | |
pxor %xmm13, %xmm4 #430.9 | |
movdqa %xmm9, %xmm13 #430.9 | |
movdqa %xmm2, %xmm14 #430.9 | |
psrld $22, %xmm12 #430.9 | |
pslld $10, %xmm13 #430.9 | |
pand %xmm9, %xmm14 #430.9 | |
por %xmm13, %xmm12 #430.9 | |
movdqa %xmm7, %xmm13 #430.9 | |
pxor %xmm12, %xmm4 #430.9 | |
pand %xmm9, %xmm13 #430.9 | |
movdqa %xmm14, %xmm12 #430.9 | |
paddd %xmm15, %xmm11 #430.9 | |
pxor %xmm13, %xmm12 #430.9 | |
### w1 = add4(SIGMA1_256(w15), w10, SIGMA0_256(w2), w1); | |
movdqa %xmm3, %xmm13 #431.14 | |
pxor %xmm0, %xmm12 #430.9 | |
movdqa %xmm3, %xmm0 #431.14 | |
psrld $17, %xmm0 #431.14 | |
pslld $15, %xmm13 #431.14 | |
paddd %xmm12, %xmm4 #430.9 | |
por %xmm13, %xmm0 #431.14 | |
movdqa %xmm3, %xmm12 #431.14 | |
movdqa %xmm3, %xmm13 #431.14 | |
psrld $19, %xmm12 #431.14 | |
pslld $13, %xmm13 #431.14 | |
por %xmm13, %xmm12 #431.14 | |
paddd %xmm11, %xmm10 #430.9 | |
movdqa %xmm3, 4864(%rsp) #426.15 | |
pxor %xmm12, %xmm0 #431.14 | |
movdqa 4288(%rsp), %xmm13 #431.14 | |
psrld $10, %xmm3 #431.14 | |
paddd %xmm4, %xmm11 #430.9 | |
pxor %xmm3, %xmm0 #431.14 | |
movdqa %xmm13, %xmm4 #431.14 | |
movdqa %xmm13, %xmm3 #431.14 | |
movdqa 4592(%rsp), %xmm12 #431.14 | |
psrld $7, %xmm4 #431.14 | |
pslld $25, %xmm3 #431.14 | |
paddd %xmm0, %xmm12 #431.14 | |
por %xmm3, %xmm4 #431.14 | |
movdqa %xmm13, %xmm0 #431.14 | |
movdqa %xmm13, %xmm3 #431.14 | |
psrld $18, %xmm0 #431.14 | |
pslld $14, %xmm3 #431.14 | |
por %xmm3, %xmm0 #431.14 | |
movdqa %xmm13, %xmm3 #431.14 | |
pxor %xmm0, %xmm4 #431.14 | |
psrld $3, %xmm3 #431.14 | |
pxor %xmm3, %xmm4 #431.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 49, w1); | |
movdqa %xmm10, %xmm0 #432.9 | |
paddd %xmm4, %xmm5 #431.14 | |
movdqa %xmm10, %xmm3 #432.9 | |
paddd %xmm5, %xmm12 #431.14 | |
movdqa %xmm10, %xmm5 #432.9 | |
movdqa %xmm10, %xmm4 #432.9 | |
psrld $6, %xmm5 #432.9 | |
pslld $26, %xmm0 #432.9 | |
psrld $11, %xmm3 #432.9 | |
pslld $21, %xmm4 #432.9 | |
por %xmm0, %xmm5 #432.9 | |
por %xmm4, %xmm3 #432.9 | |
movdqa %xmm10, %xmm0 #432.9 | |
pxor %xmm3, %xmm5 #432.9 | |
movdqa %xmm10, %xmm3 #432.9 | |
psrld $25, %xmm0 #432.9 | |
pslld $7, %xmm3 #432.9 | |
por %xmm3, %xmm0 #432.9 | |
movdqa %xmm10, %xmm3 #432.9 | |
pxor %xmm0, %xmm5 #432.9 | |
movdqa %xmm10, %xmm0 #432.9 | |
pand %xmm8, %xmm0 #432.9 | |
pandn %xmm1, %xmm3 #432.9 | |
movdqa .L_2il0floatpacket.6794(%rip), %xmm4 #432.9 | |
pxor %xmm3, %xmm0 #432.9 | |
paddd %xmm0, %xmm4 #432.9 | |
movdqa %xmm11, %xmm3 #432.9 | |
movdqa %xmm11, %xmm0 #432.9 | |
paddd %xmm5, %xmm6 #432.9 | |
psrld $2, %xmm3 #432.9 | |
pslld $30, %xmm0 #432.9 | |
paddd %xmm4, %xmm6 #432.9 | |
por %xmm0, %xmm3 #432.9 | |
movdqa %xmm11, %xmm0 #432.9 | |
movdqa %xmm11, %xmm4 #432.9 | |
psrld $13, %xmm0 #432.9 | |
pslld $19, %xmm4 #432.9 | |
por %xmm4, %xmm0 #432.9 | |
movdqa %xmm11, %xmm4 #432.9 | |
pxor %xmm0, %xmm3 #432.9 | |
movdqa %xmm11, %xmm0 #432.9 | |
movdqa %xmm11, %xmm5 #432.9 | |
psrld $22, %xmm0 #432.9 | |
pslld $10, %xmm4 #432.9 | |
pand %xmm9, %xmm5 #432.9 | |
por %xmm4, %xmm0 #432.9 | |
movdqa %xmm2, %xmm4 #432.9 | |
pxor %xmm0, %xmm3 #432.9 | |
pand %xmm11, %xmm4 #432.9 | |
movdqa %xmm5, %xmm0 #432.9 | |
paddd %xmm12, %xmm6 #432.9 | |
pxor %xmm4, %xmm0 #432.9 | |
paddd %xmm6, %xmm7 #432.9 | |
pxor %xmm14, %xmm0 #432.9 | |
### w2 = add4(SIGMA1_256(w0), w11, SIGMA0_256(w3), w2); | |
movdqa %xmm15, %xmm14 #433.14 | |
paddd %xmm0, %xmm3 #432.9 | |
movdqa %xmm15, %xmm0 #433.14 | |
paddd %xmm3, %xmm6 #432.9 | |
movdqa %xmm15, %xmm3 #433.14 | |
movdqa %xmm15, %xmm4 #433.14 | |
psrld $17, %xmm0 #433.14 | |
pslld $15, %xmm14 #433.14 | |
psrld $19, %xmm3 #433.14 | |
pslld $13, %xmm4 #433.14 | |
por %xmm14, %xmm0 #433.14 | |
por %xmm4, %xmm3 #433.14 | |
movdqa %xmm15, 4880(%rsp) #429.14 | |
pxor %xmm3, %xmm0 #433.14 | |
movdqa 4304(%rsp), %xmm4 #433.14 | |
psrld $10, %xmm15 #433.14 | |
pxor %xmm15, %xmm0 #433.14 | |
movdqa %xmm4, %xmm15 #433.14 | |
movdqa %xmm4, %xmm14 #433.14 | |
psrld $7, %xmm15 #433.14 | |
movdqa 4608(%rsp), %xmm3 #433.14 | |
pslld $25, %xmm14 #433.14 | |
paddd %xmm0, %xmm3 #433.14 | |
por %xmm14, %xmm15 #433.14 | |
movdqa %xmm4, %xmm0 #433.14 | |
movdqa %xmm4, %xmm14 #433.14 | |
psrld $18, %xmm0 #433.14 | |
pslld $14, %xmm14 #433.14 | |
por %xmm14, %xmm0 #433.14 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 50, w2); | |
movdqa %xmm7, %xmm14 #434.9 | |
pxor %xmm0, %xmm15 #433.14 | |
movdqa %xmm4, %xmm0 #433.14 | |
psrld $3, %xmm0 #433.14 | |
psrld $11, %xmm14 #434.9 | |
pxor %xmm0, %xmm15 #433.14 | |
movdqa %xmm7, %xmm0 #434.9 | |
paddd %xmm15, %xmm13 #433.14 | |
movdqa %xmm7, %xmm15 #434.9 | |
paddd %xmm13, %xmm3 #433.14 | |
movdqa %xmm7, %xmm13 #434.9 | |
psrld $6, %xmm0 #434.9 | |
pslld $26, %xmm15 #434.9 | |
pslld $21, %xmm13 #434.9 | |
por %xmm15, %xmm0 #434.9 | |
por %xmm13, %xmm14 #434.9 | |
movdqa %xmm7, %xmm15 #434.9 | |
movdqa %xmm7, %xmm13 #434.9 | |
psrld $25, %xmm15 #434.9 | |
pslld $7, %xmm13 #434.9 | |
pxor %xmm14, %xmm0 #434.9 | |
por %xmm13, %xmm15 #434.9 | |
movdqa %xmm7, %xmm13 #434.9 | |
pxor %xmm15, %xmm0 #434.9 | |
movdqa %xmm7, %xmm15 #434.9 | |
pand %xmm10, %xmm13 #434.9 | |
pandn %xmm8, %xmm15 #434.9 | |
movdqa .L_2il0floatpacket.6795(%rip), %xmm14 #434.9 | |
pxor %xmm15, %xmm13 #434.9 | |
paddd %xmm13, %xmm14 #434.9 | |
movdqa %xmm6, %xmm13 #434.9 | |
movdqa %xmm6, %xmm15 #434.9 | |
paddd %xmm0, %xmm1 #434.9 | |
psrld $2, %xmm13 #434.9 | |
pslld $30, %xmm15 #434.9 | |
paddd %xmm14, %xmm1 #434.9 | |
por %xmm15, %xmm13 #434.9 | |
movdqa %xmm6, %xmm15 #434.9 | |
movdqa %xmm6, %xmm14 #434.9 | |
psrld $13, %xmm15 #434.9 | |
pslld $19, %xmm14 #434.9 | |
por %xmm14, %xmm15 #434.9 | |
movdqa %xmm6, %xmm14 #434.9 | |
pxor %xmm15, %xmm13 #434.9 | |
movdqa %xmm6, %xmm15 #434.9 | |
movdqa %xmm6, %xmm0 #434.9 | |
psrld $22, %xmm14 #434.9 | |
pslld $10, %xmm15 #434.9 | |
pand %xmm11, %xmm0 #434.9 | |
por %xmm15, %xmm14 #434.9 | |
movdqa %xmm6, %xmm15 #434.9 | |
pxor %xmm14, %xmm13 #434.9 | |
pand %xmm9, %xmm15 #434.9 | |
movdqa %xmm0, %xmm14 #434.9 | |
paddd %xmm3, %xmm1 #434.9 | |
pxor %xmm15, %xmm14 #434.9 | |
paddd %xmm1, %xmm2 #434.9 | |
pxor %xmm5, %xmm14 #434.9 | |
### w3 = add4(SIGMA1_256(w1), w12, SIGMA0_256(w4), w3); | |
movdqa %xmm12, %xmm5 #435.14 | |
paddd %xmm14, %xmm13 #434.9 | |
movdqa %xmm12, %xmm15 #435.14 | |
paddd %xmm13, %xmm1 #434.9 | |
movdqa %xmm12, %xmm13 #435.14 | |
movdqa %xmm12, %xmm14 #435.14 | |
psrld $17, %xmm13 #435.14 | |
pslld $15, %xmm5 #435.14 | |
psrld $19, %xmm15 #435.14 | |
pslld $13, %xmm14 #435.14 | |
por %xmm5, %xmm13 #435.14 | |
por %xmm14, %xmm15 #435.14 | |
movdqa %xmm12, 4896(%rsp) #431.14 | |
pxor %xmm15, %xmm13 #435.14 | |
psrld $10, %xmm12 #435.14 | |
pxor %xmm12, %xmm13 #435.14 | |
movdqa 4320(%rsp), %xmm12 #435.14 | |
movdqa %xmm12, %xmm15 #435.14 | |
movdqa %xmm12, %xmm14 #435.14 | |
movdqa 4656(%rsp), %xmm5 #435.14 | |
psrld $7, %xmm15 #435.14 | |
pslld $25, %xmm14 #435.14 | |
paddd %xmm13, %xmm5 #435.14 | |
por %xmm14, %xmm15 #435.14 | |
movdqa %xmm12, %xmm13 #435.14 | |
movdqa %xmm12, %xmm14 #435.14 | |
psrld $18, %xmm13 #435.14 | |
pslld $14, %xmm14 #435.14 | |
por %xmm14, %xmm13 #435.14 | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 51, w3); | |
movdqa %xmm2, %xmm14 #436.9 | |
pxor %xmm13, %xmm15 #435.14 | |
movdqa %xmm12, %xmm13 #435.14 | |
psrld $3, %xmm13 #435.14 | |
pslld $26, %xmm14 #436.9 | |
pxor %xmm13, %xmm15 #435.14 | |
movdqa %xmm2, %xmm13 #436.9 | |
paddd %xmm15, %xmm4 #435.14 | |
movdqa %xmm2, %xmm15 #436.9 | |
paddd %xmm4, %xmm5 #435.14 | |
movdqa %xmm2, %xmm4 #436.9 | |
psrld $6, %xmm4 #436.9 | |
psrld $11, %xmm13 #436.9 | |
pslld $21, %xmm15 #436.9 | |
por %xmm14, %xmm4 #436.9 | |
por %xmm15, %xmm13 #436.9 | |
movdqa %xmm2, %xmm14 #436.9 | |
pxor %xmm13, %xmm4 #436.9 | |
movdqa %xmm2, %xmm13 #436.9 | |
psrld $25, %xmm14 #436.9 | |
pslld $7, %xmm13 #436.9 | |
por %xmm13, %xmm14 #436.9 | |
movdqa %xmm2, %xmm13 #436.9 | |
movdqa %xmm2, %xmm15 #436.9 | |
pand %xmm7, %xmm13 #436.9 | |
pandn %xmm10, %xmm15 #436.9 | |
pxor %xmm14, %xmm4 #436.9 | |
movdqa .L_2il0floatpacket.6796(%rip), %xmm14 #436.9 | |
pxor %xmm15, %xmm13 #436.9 | |
paddd %xmm13, %xmm14 #436.9 | |
movdqa %xmm1, %xmm15 #436.9 | |
movdqa %xmm1, %xmm13 #436.9 | |
paddd %xmm4, %xmm8 #436.9 | |
psrld $2, %xmm15 #436.9 | |
pslld $30, %xmm13 #436.9 | |
paddd %xmm14, %xmm8 #436.9 | |
por %xmm13, %xmm15 #436.9 | |
movdqa %xmm1, %xmm13 #436.9 | |
movdqa %xmm1, %xmm14 #436.9 | |
psrld $13, %xmm13 #436.9 | |
pslld $19, %xmm14 #436.9 | |
por %xmm14, %xmm13 #436.9 | |
movdqa %xmm1, %xmm14 #436.9 | |
pxor %xmm13, %xmm15 #436.9 | |
movdqa %xmm1, %xmm13 #436.9 | |
movdqa %xmm1, %xmm4 #436.9 | |
psrld $22, %xmm13 #436.9 | |
pslld $10, %xmm14 #436.9 | |
pand %xmm6, %xmm4 #436.9 | |
por %xmm14, %xmm13 #436.9 | |
movdqa %xmm1, %xmm14 #436.9 | |
pxor %xmm13, %xmm15 #436.9 | |
pand %xmm11, %xmm14 #436.9 | |
movdqa %xmm4, %xmm13 #436.9 | |
paddd %xmm5, %xmm8 #436.9 | |
pxor %xmm14, %xmm13 #436.9 | |
paddd %xmm8, %xmm9 #436.9 | |
pxor %xmm0, %xmm13 #436.9 | |
### w4 = add4(SIGMA1_256(w2), w13, SIGMA0_256(w5), w4); | |
movdqa %xmm3, %xmm0 #437.14 | |
paddd %xmm13, %xmm15 #436.9 | |
movdqa %xmm3, %xmm13 #437.14 | |
paddd %xmm15, %xmm8 #436.9 | |
movdqa %xmm3, %xmm15 #437.14 | |
movdqa %xmm3, %xmm14 #437.14 | |
psrld $17, %xmm13 #437.14 | |
pslld $15, %xmm0 #437.14 | |
psrld $19, %xmm15 #437.14 | |
pslld $13, %xmm14 #437.14 | |
por %xmm0, %xmm13 #437.14 | |
por %xmm14, %xmm15 #437.14 | |
movdqa %xmm3, 4912(%rsp) #433.14 | |
pxor %xmm15, %xmm13 #437.14 | |
psrld $10, %xmm3 #437.14 | |
movdqa 4704(%rsp), %xmm0 #437.14 | |
pxor %xmm3, %xmm13 #437.14 | |
paddd %xmm13, %xmm0 #437.14 | |
movdqa 4368(%rsp), %xmm13 #437.14 | |
movdqa %xmm13, %xmm15 #437.14 | |
movdqa %xmm13, %xmm14 #437.14 | |
psrld $7, %xmm15 #437.14 | |
pslld $25, %xmm14 #437.14 | |
por %xmm14, %xmm15 #437.14 | |
movdqa %xmm13, %xmm3 #437.14 | |
movdqa %xmm13, %xmm14 #437.14 | |
psrld $18, %xmm3 #437.14 | |
pslld $14, %xmm14 #437.14 | |
por %xmm14, %xmm3 #437.14 | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 52, w4); | |
movdqa %xmm9, %xmm14 #438.9 | |
pxor %xmm3, %xmm15 #437.14 | |
movdqa %xmm13, %xmm3 #437.14 | |
psrld $3, %xmm3 #437.14 | |
psrld $11, %xmm14 #438.9 | |
pxor %xmm3, %xmm15 #437.14 | |
movdqa %xmm9, %xmm3 #438.9 | |
paddd %xmm15, %xmm12 #437.14 | |
movdqa %xmm9, %xmm15 #438.9 | |
paddd %xmm12, %xmm0 #437.14 | |
movdqa %xmm9, %xmm12 #438.9 | |
psrld $6, %xmm3 #438.9 | |
pslld $26, %xmm15 #438.9 | |
pslld $21, %xmm12 #438.9 | |
por %xmm15, %xmm3 #438.9 | |
por %xmm12, %xmm14 #438.9 | |
movdqa %xmm9, %xmm15 #438.9 | |
movdqa %xmm9, %xmm12 #438.9 | |
psrld $25, %xmm15 #438.9 | |
pslld $7, %xmm12 #438.9 | |
pxor %xmm14, %xmm3 #438.9 | |
por %xmm12, %xmm15 #438.9 | |
movdqa %xmm9, %xmm12 #438.9 | |
pxor %xmm15, %xmm3 #438.9 | |
movdqa %xmm9, %xmm15 #438.9 | |
pand %xmm2, %xmm12 #438.9 | |
pandn %xmm7, %xmm15 #438.9 | |
movdqa .L_2il0floatpacket.6797(%rip), %xmm14 #438.9 | |
pxor %xmm15, %xmm12 #438.9 | |
paddd %xmm12, %xmm14 #438.9 | |
movdqa %xmm8, %xmm15 #438.9 | |
movdqa %xmm8, %xmm12 #438.9 | |
paddd %xmm3, %xmm10 #438.9 | |
psrld $2, %xmm15 #438.9 | |
pslld $30, %xmm12 #438.9 | |
paddd %xmm14, %xmm10 #438.9 | |
por %xmm12, %xmm15 #438.9 | |
movdqa %xmm8, %xmm12 #438.9 | |
movdqa %xmm8, %xmm14 #438.9 | |
psrld $13, %xmm12 #438.9 | |
pslld $19, %xmm14 #438.9 | |
por %xmm14, %xmm12 #438.9 | |
movdqa %xmm8, %xmm14 #438.9 | |
pxor %xmm12, %xmm15 #438.9 | |
movdqa %xmm8, %xmm12 #438.9 | |
movdqa %xmm8, %xmm3 #438.9 | |
psrld $22, %xmm14 #438.9 | |
pslld $10, %xmm12 #438.9 | |
pand %xmm1, %xmm3 #438.9 | |
por %xmm12, %xmm14 #438.9 | |
movdqa %xmm8, %xmm12 #438.9 | |
pxor %xmm14, %xmm15 #438.9 | |
pand %xmm6, %xmm12 #438.9 | |
movdqa %xmm3, %xmm14 #438.9 | |
paddd %xmm0, %xmm10 #438.9 | |
pxor %xmm12, %xmm14 #438.9 | |
### w5 = add4(SIGMA1_256(w3), w14, SIGMA0_256(w6), w5); | |
movdqa %xmm5, %xmm12 #439.14 | |
pxor %xmm4, %xmm14 #438.9 | |
movdqa %xmm5, %xmm4 #439.14 | |
psrld $17, %xmm4 #439.14 | |
pslld $15, %xmm12 #439.14 | |
paddd %xmm14, %xmm15 #438.9 | |
por %xmm12, %xmm4 #439.14 | |
movdqa %xmm5, %xmm14 #439.14 | |
movdqa %xmm5, %xmm12 #439.14 | |
psrld $19, %xmm14 #439.14 | |
pslld $13, %xmm12 #439.14 | |
por %xmm12, %xmm14 #439.14 | |
paddd %xmm10, %xmm11 #438.9 | |
movdqa 4416(%rsp), %xmm12 #439.14 | |
paddd %xmm15, %xmm10 #438.9 | |
movdqa %xmm5, 4960(%rsp) #435.14 | |
pxor %xmm14, %xmm4 #439.14 | |
psrld $10, %xmm5 #439.14 | |
movdqa %xmm12, %xmm15 #439.14 | |
movdqa %xmm12, %xmm14 #439.14 | |
pxor %xmm5, %xmm4 #439.14 | |
movdqa 4752(%rsp), %xmm5 #439.14 | |
psrld $7, %xmm15 #439.14 | |
pslld $25, %xmm14 #439.14 | |
paddd %xmm4, %xmm5 #439.14 | |
por %xmm14, %xmm15 #439.14 | |
movdqa %xmm12, %xmm4 #439.14 | |
movdqa %xmm12, %xmm14 #439.14 | |
psrld $18, %xmm4 #439.14 | |
pslld $14, %xmm14 #439.14 | |
psrld $3, %xmm12 #439.14 | |
por %xmm14, %xmm4 #439.14 | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 53, w5); | |
movdqa %xmm11, %xmm14 #440.9 | |
pxor %xmm4, %xmm15 #439.14 | |
movdqa %xmm11, %xmm4 #440.9 | |
pxor %xmm12, %xmm15 #439.14 | |
movdqa %xmm11, %xmm12 #440.9 | |
paddd %xmm15, %xmm13 #439.14 | |
psrld $6, %xmm4 #440.9 | |
pslld $26, %xmm12 #440.9 | |
movdqa %xmm11, %xmm15 #440.9 | |
paddd %xmm13, %xmm5 #439.14 | |
por %xmm12, %xmm4 #440.9 | |
psrld $11, %xmm14 #440.9 | |
pslld $21, %xmm15 #440.9 | |
movdqa %xmm11, %xmm12 #440.9 | |
movdqa %xmm11, %xmm13 #440.9 | |
por %xmm15, %xmm14 #440.9 | |
psrld $25, %xmm12 #440.9 | |
pslld $7, %xmm13 #440.9 | |
pxor %xmm14, %xmm4 #440.9 | |
por %xmm13, %xmm12 #440.9 | |
pxor %xmm12, %xmm4 #440.9 | |
### w6 = add4(SIGMA1_256(w4), w15, SIGMA0_256(w7), w6); | |
movdqa %xmm0, %xmm12 #441.14 | |
paddd %xmm4, %xmm7 #440.9 | |
movdqa %xmm11, %xmm4 #440.9 | |
movdqa %xmm11, 5024(%rsp) #438.9 | |
pand %xmm9, %xmm4 #440.9 | |
pandn %xmm2, %xmm11 #440.9 | |
psrld $19, %xmm12 #441.14 | |
movdqa %xmm2, 4928(%rsp) #434.9 | |
pxor %xmm11, %xmm4 #440.9 | |
movdqa .L_2il0floatpacket.6798(%rip), %xmm2 #440.9 | |
movdqa %xmm10, %xmm11 #440.9 | |
paddd %xmm4, %xmm2 #440.9 | |
movdqa %xmm10, %xmm4 #440.9 | |
paddd %xmm2, %xmm7 #440.9 | |
movdqa %xmm10, %xmm2 #440.9 | |
paddd %xmm5, %xmm7 #440.9 | |
pand %xmm8, %xmm2 #440.9 | |
paddd %xmm7, %xmm6 #440.9 | |
pslld $30, %xmm4 #440.9 | |
movdqa %xmm9, 4976(%rsp) #436.9 | |
movdqa %xmm10, %xmm9 #440.9 | |
movdqa %xmm5, 5056(%rsp) #439.14 | |
movdqa %xmm10, %xmm5 #440.9 | |
movdqa %xmm6, 5072(%rsp) #440.9 | |
movdqa %xmm10, %xmm6 #440.9 | |
movdqa %xmm8, 4992(%rsp) #436.9 | |
psrld $2, %xmm9 #440.9 | |
psrld $13, %xmm5 #440.9 | |
pslld $19, %xmm6 #440.9 | |
movdqa %xmm10, %xmm8 #440.9 | |
por %xmm4, %xmm9 #440.9 | |
movdqa %xmm10, 5040(%rsp) #438.9 | |
por %xmm6, %xmm5 #440.9 | |
psrld $22, %xmm8 #440.9 | |
pslld $10, %xmm11 #440.9 | |
pand %xmm1, %xmm10 #440.9 | |
pxor %xmm5, %xmm9 #440.9 | |
movdqa %xmm2, 5088(%rsp) #440.9 | |
por %xmm11, %xmm8 #440.9 | |
pxor %xmm10, %xmm2 #440.9 | |
pxor %xmm8, %xmm9 #440.9 | |
movdqa %xmm1, 4944(%rsp) #434.9 | |
pxor %xmm3, %xmm2 #440.9 | |
movdqa %xmm0, %xmm1 #441.14 | |
movdqa %xmm0, %xmm3 #441.14 | |
movdqa %xmm0, 5008(%rsp) #437.14 | |
paddd %xmm2, %xmm9 #440.9 | |
psrld $17, %xmm1 #441.14 | |
pslld $15, %xmm3 #441.14 | |
pslld $13, %xmm0 #441.14 | |
paddd %xmm9, %xmm7 #440.9 | |
por %xmm3, %xmm1 #441.14 | |
por %xmm0, %xmm12 #441.14 | |
movdqa %xmm7, 5104(%rsp) #440.9 | |
movdqa %xmm1, 5120(%rsp) #441.14 | |
movdqa %xmm12, 5136(%rsp) #441.14 | |
# LOE | |
..B2.8: # Preds ..B2.9 | |
movdqa 4464(%rsp), %xmm9 #441.14 | |
movdqa %xmm9, %xmm1 #441.14 | |
movdqa %xmm9, %xmm11 #441.14 | |
psrld $7, %xmm1 #441.14 | |
pslld $25, %xmm11 #441.14 | |
movdqa %xmm9, %xmm4 #441.14 | |
movdqa %xmm9, %xmm5 #441.14 | |
por %xmm11, %xmm1 #441.14 | |
psrld $18, %xmm4 #441.14 | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 54, w6); | |
movdqa 5072(%rsp), %xmm11 #442.9 | |
pslld $14, %xmm5 #441.14 | |
movdqa 5120(%rsp), %xmm12 #441.14 | |
por %xmm5, %xmm4 #441.14 | |
movdqa 5008(%rsp), %xmm2 #441.14 | |
movdqa %xmm9, %xmm7 #441.14 | |
movdqa %xmm11, %xmm13 #442.9 | |
movdqa %xmm11, %xmm15 #442.9 | |
movdqa %xmm11, %xmm6 #442.9 | |
movdqa %xmm11, %xmm14 #442.9 | |
pxor 5136(%rsp), %xmm12 #441.14 | |
psrld $10, %xmm2 #441.14 | |
pxor %xmm4, %xmm1 #441.14 | |
psrld $3, %xmm7 #441.14 | |
psrld $6, %xmm13 #442.9 | |
pslld $26, %xmm15 #442.9 | |
psrld $11, %xmm6 #442.9 | |
pslld $21, %xmm14 #442.9 | |
movdqa %xmm11, %xmm0 #442.9 | |
movdqa %xmm11, %xmm10 #442.9 | |
movdqa 4864(%rsp), %xmm8 #441.14 | |
pxor %xmm2, %xmm12 #441.14 | |
movdqa 4416(%rsp), %xmm3 #441.14 | |
pxor %xmm7, %xmm1 #441.14 | |
por %xmm15, %xmm13 #442.9 | |
por %xmm14, %xmm6 #442.9 | |
psrld $25, %xmm0 #442.9 | |
pslld $7, %xmm10 #442.9 | |
paddd %xmm12, %xmm8 #441.14 | |
paddd %xmm1, %xmm3 #441.14 | |
pxor %xmm6, %xmm13 #442.9 | |
por %xmm10, %xmm0 #442.9 | |
paddd %xmm3, %xmm8 #441.14 | |
pxor %xmm0, %xmm13 #442.9 | |
movdqa 5024(%rsp), %xmm0 #442.9 | |
movdqa %xmm11, %xmm2 #442.9 | |
movdqa 4976(%rsp), %xmm3 #442.9 | |
movdqa %xmm11, %xmm1 #442.9 | |
pand %xmm0, %xmm2 #442.9 | |
pandn %xmm3, %xmm1 #442.9 | |
movdqa 4928(%rsp), %xmm7 #442.9 | |
pxor %xmm1, %xmm2 #442.9 | |
movdqa .L_2il0floatpacket.6799(%rip), %xmm4 #442.9 | |
paddd %xmm13, %xmm7 #442.9 | |
movdqa 5104(%rsp), %xmm12 #442.9 | |
paddd %xmm2, %xmm4 #442.9 | |
paddd %xmm4, %xmm7 #442.9 | |
movdqa %xmm12, %xmm15 #442.9 | |
movdqa %xmm12, %xmm6 #442.9 | |
movdqa %xmm12, %xmm1 #442.9 | |
movdqa %xmm12, %xmm4 #442.9 | |
psrld $2, %xmm15 #442.9 | |
pslld $30, %xmm6 #442.9 | |
psrld $13, %xmm1 #442.9 | |
pslld $19, %xmm4 #442.9 | |
movdqa %xmm12, %xmm14 #442.9 | |
movdqa 5040(%rsp), %xmm2 #442.9 | |
por %xmm6, %xmm15 #442.9 | |
por %xmm4, %xmm1 #442.9 | |
pand %xmm2, %xmm14 #442.9 | |
movdqa 4992(%rsp), %xmm4 #442.9 | |
pxor %xmm1, %xmm15 #442.9 | |
movdqa %xmm12, %xmm13 #442.9 | |
movdqa %xmm12, %xmm10 #442.9 | |
movdqa %xmm12, %xmm1 #442.9 | |
psrld $22, %xmm13 #442.9 | |
pslld $10, %xmm10 #442.9 | |
pand %xmm4, %xmm1 #442.9 | |
movdqa %xmm14, %xmm6 #442.9 | |
por %xmm10, %xmm13 #442.9 | |
pxor %xmm1, %xmm6 #442.9 | |
pxor %xmm13, %xmm15 #442.9 | |
movdqa 5088(%rsp), %xmm1 #442.9 | |
paddd %xmm8, %xmm7 #442.9 | |
pxor %xmm6, %xmm1 #442.9 | |
movdqa 4944(%rsp), %xmm5 #442.9 | |
paddd %xmm1, %xmm15 #442.9 | |
### w7 = add4(SIGMA1_256(w5), w0, SIGMA0_256(w8), w7); | |
movdqa 5056(%rsp), %xmm13 #443.14 | |
paddd %xmm7, %xmm5 #442.9 | |
paddd %xmm15, %xmm7 #442.9 | |
movdqa %xmm13, %xmm1 #443.14 | |
movdqa %xmm13, %xmm10 #443.14 | |
movdqa %xmm13, %xmm15 #443.14 | |
movdqa %xmm13, %xmm6 #443.14 | |
psrld $17, %xmm1 #443.14 | |
pslld $15, %xmm10 #443.14 | |
psrld $19, %xmm15 #443.14 | |
pslld $13, %xmm6 #443.14 | |
por %xmm10, %xmm1 #443.14 | |
por %xmm6, %xmm15 #443.14 | |
psrld $10, %xmm13 #443.14 | |
pxor %xmm15, %xmm1 #443.14 | |
movdqa 4560(%rsp), %xmm15 #443.14 | |
pxor %xmm13, %xmm1 #443.14 | |
movdqa %xmm15, %xmm13 #443.14 | |
movdqa %xmm15, %xmm6 #443.14 | |
movdqa 4880(%rsp), %xmm10 #443.14 | |
psrld $7, %xmm13 #443.14 | |
pslld $25, %xmm6 #443.14 | |
paddd %xmm1, %xmm10 #443.14 | |
por %xmm6, %xmm13 #443.14 | |
movdqa %xmm15, %xmm1 #443.14 | |
movdqa %xmm15, %xmm6 #443.14 | |
psrld $18, %xmm1 #443.14 | |
pslld $14, %xmm6 #443.14 | |
por %xmm6, %xmm1 #443.14 | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 55, w7); | |
movdqa %xmm5, %xmm6 #444.9 | |
pxor %xmm1, %xmm13 #443.14 | |
movdqa %xmm15, %xmm1 #443.14 | |
psrld $3, %xmm1 #443.14 | |
pslld $26, %xmm6 #444.9 | |
pxor %xmm1, %xmm13 #443.14 | |
movdqa %xmm5, %xmm1 #444.9 | |
paddd %xmm13, %xmm9 #443.14 | |
movdqa %xmm5, %xmm13 #444.9 | |
paddd %xmm9, %xmm10 #443.14 | |
movdqa %xmm5, %xmm9 #444.9 | |
psrld $6, %xmm9 #444.9 | |
psrld $11, %xmm1 #444.9 | |
pslld $21, %xmm13 #444.9 | |
por %xmm6, %xmm9 #444.9 | |
por %xmm13, %xmm1 #444.9 | |
movdqa %xmm5, %xmm6 #444.9 | |
pxor %xmm1, %xmm9 #444.9 | |
movdqa %xmm5, %xmm1 #444.9 | |
psrld $25, %xmm6 #444.9 | |
pslld $7, %xmm1 #444.9 | |
por %xmm1, %xmm6 #444.9 | |
movdqa %xmm11, %xmm1 #444.9 | |
movdqa %xmm5, %xmm13 #444.9 | |
pand %xmm5, %xmm1 #444.9 | |
pandn %xmm0, %xmm13 #444.9 | |
pxor %xmm6, %xmm9 #444.9 | |
movdqa .L_2il0floatpacket.6800(%rip), %xmm6 #444.9 | |
pxor %xmm13, %xmm1 #444.9 | |
paddd %xmm1, %xmm6 #444.9 | |
movdqa %xmm7, %xmm13 #444.9 | |
movdqa %xmm7, %xmm1 #444.9 | |
paddd %xmm9, %xmm3 #444.9 | |
psrld $2, %xmm13 #444.9 | |
pslld $30, %xmm1 #444.9 | |
paddd %xmm6, %xmm3 #444.9 | |
por %xmm1, %xmm13 #444.9 | |
movdqa %xmm7, %xmm6 #444.9 | |
movdqa %xmm7, %xmm1 #444.9 | |
psrld $13, %xmm6 #444.9 | |
pslld $19, %xmm1 #444.9 | |
por %xmm1, %xmm6 #444.9 | |
movdqa %xmm7, %xmm1 #444.9 | |
pxor %xmm6, %xmm13 #444.9 | |
movdqa %xmm7, %xmm6 #444.9 | |
movdqa %xmm12, %xmm9 #444.9 | |
psrld $22, %xmm1 #444.9 | |
pslld $10, %xmm6 #444.9 | |
pand %xmm7, %xmm9 #444.9 | |
por %xmm6, %xmm1 #444.9 | |
movdqa %xmm2, %xmm6 #444.9 | |
pxor %xmm1, %xmm13 #444.9 | |
pand %xmm7, %xmm6 #444.9 | |
movdqa %xmm9, %xmm1 #444.9 | |
paddd %xmm10, %xmm3 #444.9 | |
pxor %xmm6, %xmm1 #444.9 | |
paddd %xmm3, %xmm4 #444.9 | |
pxor %xmm14, %xmm1 #444.9 | |
### w8 = add4(SIGMA1_256(w6), w1, SIGMA0_256(w9), w8); | |
movdqa %xmm8, %xmm14 #445.14 | |
paddd %xmm1, %xmm13 #444.9 | |
movdqa %xmm8, %xmm1 #445.14 | |
paddd %xmm13, %xmm3 #444.9 | |
movdqa %xmm8, %xmm13 #445.14 | |
movdqa %xmm8, %xmm6 #445.14 | |
psrld $17, %xmm1 #445.14 | |
pslld $15, %xmm14 #445.14 | |
psrld $19, %xmm13 #445.14 | |
pslld $13, %xmm6 #445.14 | |
por %xmm14, %xmm1 #445.14 | |
por %xmm6, %xmm13 #445.14 | |
psrld $10, %xmm8 #445.14 | |
pxor %xmm13, %xmm1 #445.14 | |
pxor %xmm8, %xmm1 #445.14 | |
movdqa 4576(%rsp), %xmm8 #445.14 | |
movdqa %xmm8, %xmm14 #445.14 | |
movdqa %xmm8, %xmm13 #445.14 | |
movdqa 4896(%rsp), %xmm6 #445.14 | |
psrld $7, %xmm14 #445.14 | |
pslld $25, %xmm13 #445.14 | |
paddd %xmm1, %xmm6 #445.14 | |
por %xmm13, %xmm14 #445.14 | |
movdqa %xmm8, %xmm1 #445.14 | |
movdqa %xmm8, %xmm13 #445.14 | |
psrld $18, %xmm1 #445.14 | |
pslld $14, %xmm13 #445.14 | |
por %xmm13, %xmm1 #445.14 | |
### SHA256ROUND(a, b, c, d, e, f, g, h, 56, w8); | |
movdqa %xmm4, %xmm13 #446.9 | |
pxor %xmm1, %xmm14 #445.14 | |
movdqa %xmm8, %xmm1 #445.14 | |
psrld $3, %xmm1 #445.14 | |
pslld $26, %xmm13 #446.9 | |
pxor %xmm1, %xmm14 #445.14 | |
movdqa %xmm4, %xmm1 #446.9 | |
psrld $6, %xmm1 #446.9 | |
paddd %xmm14, %xmm15 #445.14 | |
por %xmm13, %xmm1 #446.9 | |
movdqa %xmm4, %xmm14 #446.9 | |
movdqa %xmm4, %xmm13 #446.9 | |
psrld $11, %xmm14 #446.9 | |
pslld $21, %xmm13 #446.9 | |
paddd %xmm15, %xmm6 #445.14 | |
por %xmm13, %xmm14 #446.9 | |
movdqa %xmm4, %xmm15 #446.9 | |
movdqa %xmm4, %xmm13 #446.9 | |
psrld $25, %xmm15 #446.9 | |
pslld $7, %xmm13 #446.9 | |
pxor %xmm14, %xmm1 #446.9 | |
por %xmm13, %xmm15 #446.9 | |
movdqa %xmm4, %xmm13 #446.9 | |
pxor %xmm15, %xmm1 #446.9 | |
movdqa %xmm4, %xmm15 #446.9 | |
pand %xmm5, %xmm13 #446.9 | |
pandn %xmm11, %xmm15 #446.9 | |
movdqa .L_2il0floatpacket.6801(%rip), %xmm14 #446.9 | |
pxor %xmm15, %xmm13 #446.9 | |
paddd %xmm1, %xmm0 #446.9 | |
paddd %xmm13, %xmm14 #446.9 | |
paddd %xmm14, %xmm0 #446.9 | |
movdqa %xmm3, %xmm14 #446.9 | |
movdqa %xmm3, %xmm15 #446.9 | |
psrld $2, %xmm14 #446.9 | |
pslld $30, %xmm15 #446.9 | |
movdqa %xmm3, %xmm13 #446.9 | |
por %xmm15, %xmm14 #446.9 | |
movdqa %xmm3, %xmm15 #446.9 | |
psrld $13, %xmm15 #446.9 | |
pslld $19, %xmm13 #446.9 | |
por %xmm13, %xmm15 #446.9 | |
movdqa %xmm3, %xmm13 #446.9 | |
pxor %xmm15, %xmm14 #446.9 | |
movdqa %xmm3, %xmm15 #446.9 | |
movdqa %xmm3, %xmm1 #446.9 | |
psrld $22, %xmm13 #446.9 | |
pslld $10, %xmm15 #446.9 | |
pand %xmm7, %xmm1 #446.9 | |
por %xmm15, %xmm13 #446.9 | |
movdqa %xmm12, %xmm15 #446.9 | |
pxor %xmm13, %xmm14 #446.9 | |
pand %xmm3, %xmm15 #446.9 | |
movdqa %xmm1, %xmm13 #446.9 | |
paddd %xmm6, %xmm0 #446.9 | |
pxor %xmm15, %xmm13 #446.9 | |
paddd %xmm0, %xmm2 #446.9 | |
pxor %xmm9, %xmm13 #446.9 | |
### w9 = add4(SIGMA1_256(w7), w2, SIGMA0_256(w10), w9); | |
movdqa %xmm10, %xmm9 #447.14 | |
paddd %xmm13, %xmm14 #446.9 | |
movdqa %xmm10, %xmm13 #447.14 | |
paddd %xmm14, %xmm0 #446.9 | |
movdqa %xmm10, %xmm15 #447.14 | |
movdqa %xmm10, %xmm14 #447.14 | |
psrld $17, %xmm13 #447.14 | |
pslld $15, %xmm9 #447.14 | |
psrld $19, %xmm15 #447.14 | |
pslld $13, %xmm14 #447.14 | |
por %xmm9, %xmm13 #447.14 | |
por %xmm14, %xmm15 #447.14 | |
psrld $10, %xmm10 #447.14 | |
pxor %xmm15, %xmm13 #447.14 | |
movdqa 4912(%rsp), %xmm15 #447.14 | |
pxor %xmm10, %xmm13 #447.14 | |
paddd %xmm13, %xmm15 #447.14 | |
movdqa 4592(%rsp), %xmm13 #447.14 | |
movdqa %xmm13, %xmm14 #447.14 | |
movdqa %xmm13, %xmm9 #447.14 | |
psrld $7, %xmm14 #447.14 | |
pslld $25, %xmm9 #447.14 | |
por %xmm9, %xmm14 #447.14 | |
movdqa %xmm13, %xmm9 #447.14 | |
movdqa %xmm13, %xmm10 #447.14 | |
psrld $18, %xmm9 #447.14 | |
pslld $14, %xmm10 #447.14 | |
por %xmm10, %xmm9 #447.14 | |
movdqa %xmm13, %xmm10 #447.14 | |
pxor %xmm9, %xmm14 #447.14 | |
psrld $3, %xmm10 #447.14 | |
pxor %xmm10, %xmm14 #447.14 | |
### SHA256ROUND(h, a, b, c, d, e, f, g, 57, w9); | |
movdqa %xmm2, %xmm9 #448.9 | |
paddd %xmm14, %xmm8 #447.14 | |
movdqa %xmm2, %xmm14 #448.9 | |
paddd %xmm8, %xmm15 #447.14 | |
movdqa %xmm2, %xmm10 #448.9 | |
movdqa %xmm2, %xmm8 #448.9 | |
psrld $6, %xmm14 #448.9 | |
pslld $26, %xmm9 #448.9 | |
psrld $11, %xmm10 #448.9 | |
pslld $21, %xmm8 #448.9 | |
por %xmm9, %xmm14 #448.9 | |
por %xmm8, %xmm10 #448.9 | |
movdqa %xmm2, %xmm9 #448.9 | |
movdqa %xmm2, %xmm8 #448.9 | |
psrld $25, %xmm9 #448.9 | |
pslld $7, %xmm8 #448.9 | |
pxor %xmm10, %xmm14 #448.9 | |
por %xmm8, %xmm9 #448.9 | |
movdqa %xmm2, %xmm8 #448.9 | |
pxor %xmm9, %xmm14 #448.9 | |
movdqa %xmm2, %xmm9 #448.9 | |
pand %xmm4, %xmm8 #448.9 | |
pandn %xmm5, %xmm9 #448.9 | |
movdqa .L_2il0floatpacket.6802(%rip), %xmm10 #448.9 | |
pxor %xmm9, %xmm8 #448.9 | |
paddd %xmm8, %xmm10 #448.9 | |
movdqa %xmm0, %xmm8 #448.9 | |
movdqa %xmm0, %xmm9 #448.9 | |
paddd %xmm14, %xmm11 #448.9 | |
psrld $2, %xmm8 #448.9 | |
pslld $30, %xmm9 #448.9 | |
paddd %xmm10, %xmm11 #448.9 | |
por %xmm9, %xmm8 #448.9 | |
movdqa %xmm0, %xmm10 #448.9 | |
movdqa %xmm0, %xmm9 #448.9 | |
psrld $13, %xmm10 #448.9 | |
pslld $19, %xmm9 #448.9 | |
por %xmm9, %xmm10 #448.9 | |
movdqa %xmm0, %xmm9 #448.9 | |
pxor %xmm10, %xmm8 #448.9 | |
movdqa %xmm0, %xmm10 #448.9 | |
movdqa %xmm0, %xmm14 #448.9 | |
psrld $22, %xmm9 #448.9 | |
pslld $10, %xmm10 #448.9 | |
pand %xmm3, %xmm14 #448.9 | |
por %xmm10, %xmm9 #448.9 | |
movdqa %xmm0, %xmm10 #448.9 | |
pxor %xmm9, %xmm8 #448.9 | |
pand %xmm7, %xmm10 #448.9 | |
movdqa %xmm14, %xmm9 #448.9 | |
paddd %xmm15, %xmm11 #448.9 | |
pxor %xmm10, %xmm9 #448.9 | |
paddd %xmm11, %xmm12 #448.9 | |
pxor %xmm1, %xmm9 #448.9 | |
### w10 = add4(SIGMA1_256(w8), w3, SIGMA0_256(w11), w10); | |
movdqa %xmm6, %xmm1 #449.15 | |
paddd %xmm9, %xmm8 #448.9 | |
movdqa %xmm6, %xmm9 #449.15 | |
paddd %xmm8, %xmm11 #448.9 | |
movdqa %xmm6, %xmm10 #449.15 | |
movdqa %xmm6, %xmm8 #449.15 | |
psrld $17, %xmm1 #449.15 | |
pslld $15, %xmm9 #449.15 | |
psrld $19, %xmm10 #449.15 | |
pslld $13, %xmm8 #449.15 | |
por %xmm9, %xmm1 #449.15 | |
por %xmm8, %xmm10 #449.15 | |
psrld $10, %xmm6 #449.15 | |
pxor %xmm10, %xmm1 #449.15 | |
pxor %xmm6, %xmm1 #449.15 | |
movdqa 4960(%rsp), %xmm6 #449.15 | |
paddd %xmm1, %xmm6 #449.15 | |
movdqa 4608(%rsp), %xmm1 #449.15 | |
movdqa %xmm1, %xmm10 #449.15 | |
movdqa %xmm1, %xmm9 #449.15 | |
psrld $7, %xmm10 #449.15 | |
pslld $25, %xmm9 #449.15 | |
por %xmm9, %xmm10 #449.15 | |
movdqa %xmm1, %xmm8 #449.15 | |
movdqa %xmm1, %xmm9 #449.15 | |
psrld $18, %xmm8 #449.15 | |
pslld $14, %xmm9 #449.15 | |
por %xmm9, %xmm8 #449.15 | |
### SHA256ROUND(g, h, a, b, c, d, e, f, 58, w10); | |
movdqa %xmm12, %xmm9 #450.9 | |
pxor %xmm8, %xmm10 #449.15 | |
movdqa %xmm1, %xmm8 #449.15 | |
psrld $3, %xmm8 #449.15 | |
pslld $26, %xmm9 #450.9 | |
pxor %xmm8, %xmm10 #449.15 | |
movdqa %xmm12, %xmm8 #450.9 | |
paddd %xmm10, %xmm13 #449.15 | |
movdqa %xmm12, %xmm10 #450.9 | |
paddd %xmm13, %xmm6 #449.15 | |
movdqa %xmm12, %xmm13 #450.9 | |
psrld $6, %xmm13 #450.9 | |
psrld $11, %xmm8 #450.9 | |
pslld $21, %xmm10 #450.9 | |
por %xmm9, %xmm13 #450.9 | |
por %xmm10, %xmm8 #450.9 | |
movdqa %xmm12, %xmm9 #450.9 | |
pxor %xmm8, %xmm13 #450.9 | |
movdqa %xmm12, %xmm8 #450.9 | |
psrld $25, %xmm9 #450.9 | |
pslld $7, %xmm8 #450.9 | |
por %xmm8, %xmm9 #450.9 | |
movdqa %xmm12, %xmm8 #450.9 | |
pxor %xmm9, %xmm13 #450.9 | |
movdqa %xmm12, %xmm9 #450.9 | |
pand %xmm2, %xmm8 #450.9 | |
pandn %xmm4, %xmm9 #450.9 | |
movdqa .L_2il0floatpacket.6803(%rip), %xmm10 #450.9 | |
pxor %xmm9, %xmm8 #450.9 | |
paddd %xmm13, %xmm5 #450.9 | |
paddd %xmm8, %xmm10 #450.9 | |
paddd %xmm10, %xmm5 #450.9 | |
movdqa %xmm11, %xmm10 #450.9 | |
movdqa %xmm11, %xmm9 #450.9 | |
psrld $2, %xmm10 #450.9 | |
pslld $30, %xmm9 #450.9 | |
movdqa %xmm11, %xmm8 #450.9 | |
por %xmm9, %xmm10 #450.9 | |
movdqa %xmm11, %xmm9 #450.9 | |
psrld $13, %xmm8 #450.9 | |
pslld $19, %xmm9 #450.9 | |
por %xmm9, %xmm8 #450.9 | |
movdqa %xmm11, %xmm9 #450.9 | |
pxor %xmm8, %xmm10 #450.9 | |
movdqa %xmm11, %xmm8 #450.9 | |
movdqa %xmm11, %xmm13 #450.9 | |
psrld $22, %xmm8 #450.9 | |
pslld $10, %xmm9 #450.9 | |
pand %xmm0, %xmm13 #450.9 | |
por %xmm9, %xmm8 #450.9 | |
movdqa %xmm11, %xmm9 #450.9 | |
pxor %xmm8, %xmm10 #450.9 | |
pand %xmm3, %xmm9 #450.9 | |
movdqa %xmm13, %xmm8 #450.9 | |
paddd %xmm6, %xmm5 #450.9 | |
pxor %xmm9, %xmm8 #450.9 | |
paddd %xmm5, %xmm7 #450.9 | |
pxor %xmm14, %xmm8 #450.9 | |
### w11 = add4(SIGMA1_256(w9), w4, SIGMA0_256(w12), w11); | |
### SHA256ROUND(f, g, h, a, b, c, d, e, 59, w11); | |
movdqa %xmm7, %xmm14 #452.9 | |
paddd %xmm8, %xmm10 #450.9 | |
movdqa %xmm7, %xmm8 #452.9 | |
paddd %xmm10, %xmm5 #450.9 | |
movdqa %xmm7, %xmm10 #452.9 | |
movdqa %xmm7, %xmm9 #452.9 | |
psrld $6, %xmm10 #452.9 | |
pslld $26, %xmm14 #452.9 | |
psrld $11, %xmm8 #452.9 | |
pslld $21, %xmm9 #452.9 | |
por %xmm14, %xmm10 #452.9 | |
por %xmm9, %xmm8 #452.9 | |
movdqa %xmm7, %xmm14 #452.9 | |
pxor %xmm8, %xmm10 #452.9 | |
movdqa %xmm7, %xmm8 #452.9 | |
psrld $25, %xmm14 #452.9 | |
pslld $7, %xmm8 #452.9 | |
por %xmm8, %xmm14 #452.9 | |
movdqa %xmm7, %xmm8 #452.9 | |
pxor %xmm14, %xmm10 #452.9 | |
movdqa %xmm7, %xmm14 #452.9 | |
pand %xmm12, %xmm14 #452.9 | |
pandn %xmm2, %xmm8 #452.9 | |
movdqa .L_2il0floatpacket.6804(%rip), %xmm9 #452.9 | |
pxor %xmm8, %xmm14 #452.9 | |
paddd %xmm14, %xmm9 #452.9 | |
movdqa %xmm15, %xmm8 #451.15 | |
movdqa %xmm15, %xmm14 #451.15 | |
psrld $17, %xmm8 #451.15 | |
pslld $15, %xmm14 #451.15 | |
paddd %xmm10, %xmm4 #452.9 | |
por %xmm14, %xmm8 #451.15 | |
movdqa %xmm15, %xmm10 #451.15 | |
movdqa %xmm15, %xmm14 #451.15 | |
psrld $19, %xmm10 #451.15 | |
pslld $13, %xmm14 #451.15 | |
psrld $10, %xmm15 #451.15 | |
por %xmm14, %xmm10 #451.15 | |
paddd %xmm9, %xmm4 #452.9 | |
pxor %xmm10, %xmm8 #451.15 | |
pxor %xmm15, %xmm8 #451.15 | |
movdqa 4656(%rsp), %xmm15 #451.15 | |
movdqa %xmm15, %xmm9 #451.15 | |
movdqa %xmm15, %xmm10 #451.15 | |
movdqa 5008(%rsp), %xmm14 #451.15 | |
psrld $7, %xmm9 #451.15 | |
pslld $25, %xmm10 #451.15 | |
paddd %xmm8, %xmm14 #451.15 | |
por %xmm10, %xmm9 #451.15 | |
movdqa %xmm15, %xmm8 #451.15 | |
movdqa %xmm15, %xmm10 #451.15 | |
psrld $18, %xmm8 #451.15 | |
pslld $14, %xmm10 #451.15 | |
por %xmm10, %xmm8 #451.15 | |
movdqa %xmm15, %xmm10 #451.15 | |
pxor %xmm8, %xmm9 #451.15 | |
psrld $3, %xmm10 #451.15 | |
pxor %xmm10, %xmm9 #451.15 | |
movdqa %xmm5, %xmm8 #452.9 | |
paddd %xmm9, %xmm1 #451.15 | |
movdqa %xmm5, %xmm9 #452.9 | |
paddd %xmm1, %xmm14 #451.15 | |
movdqa %xmm5, %xmm10 #452.9 | |
paddd %xmm14, %xmm4 #452.9 | |
movdqa %xmm5, %xmm14 #452.9 | |
psrld $2, %xmm14 #452.9 | |
pslld $30, %xmm8 #452.9 | |
psrld $13, %xmm9 #452.9 | |
pslld $19, %xmm10 #452.9 | |
movdqa %xmm5, %xmm1 #452.9 | |
por %xmm8, %xmm14 #452.9 | |
por %xmm10, %xmm9 #452.9 | |
pand %xmm11, %xmm1 #452.9 | |
pxor %xmm9, %xmm14 #452.9 | |
movdqa %xmm5, %xmm8 #452.9 | |
movdqa %xmm5, %xmm9 #452.9 | |
movdqa %xmm5, %xmm10 #452.9 | |
movdqa %xmm0, 5152(%rsp) #446.9 | |
psrld $22, %xmm8 #452.9 | |
pslld $10, %xmm9 #452.9 | |
pand %xmm0, %xmm10 #452.9 | |
movdqa %xmm1, %xmm0 #452.9 | |
paddd %xmm4, %xmm3 #452.9 | |
por %xmm9, %xmm8 #452.9 | |
pxor %xmm10, %xmm0 #452.9 | |
pxor %xmm8, %xmm14 #452.9 | |
pxor %xmm13, %xmm0 #452.9 | |
### w12 = add4(SIGMA1_256(w10), w5, SIGMA0_256(w13), w12); | |
### SHA256ROUND(e, f, g, h, a, b, c, d, 60, w12); | |
movdqa %xmm3, %xmm13 #454.9 | |
movdqa %xmm3, %xmm8 #454.9 | |
psrld $6, %xmm13 #454.9 | |
pslld $26, %xmm8 #454.9 | |
movdqa %xmm3, %xmm9 #454.9 | |
movdqa %xmm3, %xmm10 #454.9 | |
paddd %xmm0, %xmm14 #452.9 | |
por %xmm8, %xmm13 #454.9 | |
psrld $11, %xmm9 #454.9 | |
pslld $21, %xmm10 #454.9 | |
movdqa %xmm3, %xmm0 #454.9 | |
movdqa %xmm3, %xmm8 #454.9 | |
por %xmm10, %xmm9 #454.9 | |
psrld $25, %xmm0 #454.9 | |
pslld $7, %xmm8 #454.9 | |
pxor %xmm9, %xmm13 #454.9 | |
por %xmm8, %xmm0 #454.9 | |
movdqa %xmm3, %xmm8 #454.9 | |
pxor %xmm0, %xmm13 #454.9 | |
movdqa %xmm3, %xmm0 #454.9 | |
pand %xmm7, %xmm0 #454.9 | |
pandn %xmm12, %xmm8 #454.9 | |
paddd %xmm14, %xmm4 #452.9 | |
pxor %xmm8, %xmm0 #454.9 | |
movdqa .L_2il0floatpacket.6805(%rip), %xmm14 #454.9 | |
paddd %xmm13, %xmm2 #454.9 | |
paddd %xmm0, %xmm14 #454.9 | |
movdqa %xmm6, %xmm0 #453.15 | |
movdqa %xmm6, %xmm13 #453.15 | |
movdqa %xmm6, %xmm8 #453.15 | |
movdqa %xmm6, %xmm9 #453.15 | |
psrld $17, %xmm0 #453.15 | |
pslld $15, %xmm13 #453.15 | |
psrld $19, %xmm8 #453.15 | |
pslld $13, %xmm9 #453.15 | |
por %xmm13, %xmm0 #453.15 | |
por %xmm9, %xmm8 #453.15 | |
psrld $10, %xmm6 #453.15 | |
pxor %xmm8, %xmm0 #453.15 | |
paddd %xmm14, %xmm2 #454.9 | |
movdqa 5056(%rsp), %xmm13 #453.15 | |
pxor %xmm6, %xmm0 #453.15 | |
movdqa 4704(%rsp), %xmm6 #453.15 | |
paddd %xmm0, %xmm13 #453.15 | |
movdqa %xmm6, %xmm10 #453.15 | |
movdqa %xmm6, %xmm0 #453.15 | |
psrld $7, %xmm10 #453.15 | |
pslld $25, %xmm0 #453.15 | |
movdqa %xmm6, %xmm8 #453.15 | |
movdqa %xmm6, %xmm9 #453.15 | |
por %xmm0, %xmm10 #453.15 | |
psrld $18, %xmm8 #453.15 | |
### | |
### /* Skip last 3-rounds; not necessary for H==0 */ | |
### #if 0 | |
### w13 = add4(SIGMA1_256(w11), w6, SIGMA0_256(w14), w13); | |
### SHA256ROUND(d, e, f, g, h, a, b, c, 61, w13); | |
### w14 = add4(SIGMA1_256(w12), w7, SIGMA0_256(w15), w14); | |
### SHA256ROUND(c, d, e, f, g, h, a, b, 62, w14); | |
### w15 = add4(SIGMA1_256(w13), w8, SIGMA0_256(w0), w15); | |
### SHA256ROUND(b, c, d, e, f, g, h, a, 63, w15); | |
### #endif | |
### | |
### /* store resulsts directly in thash */ | |
### #define store_2(x,i) \ | |
### w0 = _mm_set1_epi32(hInit[i]); \ | |
### *(__m128i *)&(thash)[i][0+k] = _mm_add_epi32(w0, x); | |
### | |
### store_2(a, 0); | |
movdqa 960(%rsp), %xmm0 #471.9 | |
pslld $14, %xmm9 #453.15 | |
movq 5240(%rsp), %rdx #471.9 | |
por %xmm9, %xmm8 #453.15 | |
movq 5232(%rsp), %rax #471.9 | |
paddd %xmm3, %xmm0 #471.9 | |
### store_2(b, 1); | |
movdqa 976(%rsp), %xmm3 #472.9 | |
pxor %xmm8, %xmm10 #453.15 | |
psrld $3, %xmm6 #453.15 | |
paddd %xmm7, %xmm3 #472.9 | |
### store_2(c, 2); | |
movdqa 992(%rsp), %xmm7 #473.9 | |
pxor %xmm6, %xmm10 #453.15 | |
movdqa %xmm0, (%rax,%rdx,4) #471.9 | |
paddd %xmm12, %xmm7 #473.9 | |
movdqa %xmm3, 128(%rax,%rdx,4) #472.9 | |
movdqa %xmm4, %xmm12 #454.9 | |
movdqa %xmm4, %xmm0 #454.9 | |
movdqa %xmm4, %xmm3 #454.9 | |
movdqa %xmm4, %xmm6 #454.9 | |
psrld $2, %xmm12 #454.9 | |
movdqa %xmm7, 256(%rax,%rdx,4) #473.9 | |
pslld $30, %xmm0 #454.9 | |
psrld $13, %xmm3 #454.9 | |
pslld $19, %xmm6 #454.9 | |
movdqa %xmm4, %xmm7 #454.9 | |
movdqa %xmm4, %xmm8 #454.9 | |
movdqa %xmm4, %xmm9 #454.9 | |
por %xmm0, %xmm12 #454.9 | |
movdqa %xmm4, 5200(%rsp) #452.9 | |
por %xmm6, %xmm3 #454.9 | |
psrld $22, %xmm7 #454.9 | |
pslld $10, %xmm8 #454.9 | |
pand %xmm5, %xmm9 #454.9 | |
pand %xmm11, %xmm4 #454.9 | |
paddd %xmm10, %xmm15 #453.15 | |
pxor %xmm3, %xmm12 #454.9 | |
por %xmm8, %xmm7 #454.9 | |
pxor %xmm4, %xmm9 #454.9 | |
paddd %xmm15, %xmm13 #453.15 | |
pxor %xmm7, %xmm12 #454.9 | |
pxor %xmm1, %xmm9 #454.9 | |
paddd %xmm13, %xmm2 #454.9 | |
paddd %xmm9, %xmm12 #454.9 | |
movdqa %xmm2, 5216(%rsp) #454.9 | |
paddd %xmm12, %xmm2 #454.9 | |
### store_2(d, 3); | |
movdqa 1008(%rsp), %xmm0 #474.9 | |
paddd %xmm2, %xmm0 #474.9 | |
movdqa %xmm11, 5168(%rsp) #448.9 | |
movdqa %xmm5, 5184(%rsp) #450.9 | |
movdqa %xmm0, 384(%rax,%rdx,4) #474.9 | |
# LOE | |
..B2.7: # Preds ..B2.8 | |
### store_2(e, 4); | |
movq 5240(%rsp), %rdx #475.9 | |
movq 5232(%rsp), %rax #475.9 | |
movl 5248(%rsp), %ecx #168.24 | |
movdqa 5152(%rsp), %xmm3 #454.9 | |
addl $4, %ecx #168.24 | |
movdqa 1024(%rsp), %xmm0 #475.9 | |
### store_2(f, 5); | |
movdqa 1040(%rsp), %xmm1 #476.9 | |
### store_2(g, 6); | |
movdqa 1056(%rsp), %xmm2 #477.9 | |
paddd 5216(%rsp), %xmm3 #454.9 | |
### store_2(h, 7); | |
movdqa 1072(%rsp), %xmm4 #478.9 | |
movl %ecx, 5248(%rsp) #168.24 | |
paddd %xmm3, %xmm4 #478.9 | |
paddd 5200(%rsp), %xmm0 #475.9 | |
cmpl $32, %ecx #168.18 | |
paddd 5184(%rsp), %xmm1 #476.9 | |
paddd 5168(%rsp), %xmm2 #477.9 | |
### *(__m128i *)&(thash)[8][0+k] = nonce; | |
movdqa (%rsp), %xmm5 #479.22 | |
movdqa %xmm0, 512(%rax,%rdx,4) #475.9 | |
movdqa %xmm1, 640(%rax,%rdx,4) #476.9 | |
movdqa %xmm2, 768(%rax,%rdx,4) #477.9 | |
movdqa %xmm4, 896(%rax,%rdx,4) #478.9 | |
movdqa %xmm5, 1024(%rax,%rdx,4) #479.22 | |
movq %rcx, 5240(%rsp) #168.24 | |
jb ..B2.2 # Prob 87% #168.18 | |
# LOE | |
..B2.3: # Preds ..B2.7 | |
### } | |
### | |
### } | |
addq $5256, %rsp #482.1 | |
..___tag_value_DoubleBlockSHA256.48: # | |
popq %rbp #482.1 | |
..___tag_value_DoubleBlockSHA256.50: # | |
popq %rbx #482.1 | |
..___tag_value_DoubleBlockSHA256.52: # | |
popq %r15 #482.1 | |
..___tag_value_DoubleBlockSHA256.54: # | |
popq %r14 #482.1 | |
..___tag_value_DoubleBlockSHA256.56: # | |
popq %r13 #482.1 | |
..___tag_value_DoubleBlockSHA256.58: # | |
popq %r12 #482.1 | |
..___tag_value_DoubleBlockSHA256.60: # | |
ret #482.1 | |
.align 16,0x90 | |
..___tag_value_DoubleBlockSHA256.61: # | |
# LOE | |
# mark_end; | |
.type DoubleBlockSHA256,@function | |
.size DoubleBlockSHA256,.-DoubleBlockSHA256 | |
.data | |
# -- End DoubleBlockSHA256 | |
# ---------------------------------------------------------------------------
# Read-only constant pool emitted by icc for the 4-way SSE2 SHA-256.
# Every .L_2il0floatpacket.* object below is a 16-byte, 16-aligned XMM
# constant: one 32-bit word broadcast to all four SIMD lanes.
# The broadcast values 0x428a2f98 .. 0xc6e00bf3 are the SHA-256 round
# constants K[0]..K[28] as defined in FIPS 180-4, section 4.2.2.
# ---------------------------------------------------------------------------
.section .rodata, "a" | |
.align 16 | |
.align 16 | |
# {0,1,2,3} — per-lane increments; presumably added to a base nonce so each
# of the four SIMD lanes hashes a consecutive nonce. NOTE(review): the code
# consuming this vector is outside this chunk — confirm against the
# ScanHash_4WaySSE2 setup.
.L_2il0floatpacket.6744: | |
.long 0x00000000,0x00000001,0x00000002,0x00000003 | |
.type .L_2il0floatpacket.6744,@object | |
.size .L_2il0floatpacket.6744,16 | |
.align 16 | |
# SHA-256 round constant K[0] = 0x428a2f98, broadcast to 4 lanes
.L_2il0floatpacket.6745: | |
.long 0x428a2f98,0x428a2f98,0x428a2f98,0x428a2f98 | |
.type .L_2il0floatpacket.6745,@object | |
.size .L_2il0floatpacket.6745,16 | |
.align 16 | |
# K[1] = 0x71374491
.L_2il0floatpacket.6746: | |
.long 0x71374491,0x71374491,0x71374491,0x71374491 | |
.type .L_2il0floatpacket.6746,@object | |
.size .L_2il0floatpacket.6746,16 | |
.align 16 | |
# K[2] = 0xb5c0fbcf
.L_2il0floatpacket.6747: | |
.long 0xb5c0fbcf,0xb5c0fbcf,0xb5c0fbcf,0xb5c0fbcf | |
.type .L_2il0floatpacket.6747,@object | |
.size .L_2il0floatpacket.6747,16 | |
.align 16 | |
# K[3] = 0xe9b5dba5
.L_2il0floatpacket.6748: | |
.long 0xe9b5dba5,0xe9b5dba5,0xe9b5dba5,0xe9b5dba5 | |
.type .L_2il0floatpacket.6748,@object | |
.size .L_2il0floatpacket.6748,16 | |
.align 16 | |
# K[4] = 0x3956c25b
.L_2il0floatpacket.6749: | |
.long 0x3956c25b,0x3956c25b,0x3956c25b,0x3956c25b | |
.type .L_2il0floatpacket.6749,@object | |
.size .L_2il0floatpacket.6749,16 | |
.align 16 | |
# K[5] = 0x59f111f1
.L_2il0floatpacket.6750: | |
.long 0x59f111f1,0x59f111f1,0x59f111f1,0x59f111f1 | |
.type .L_2il0floatpacket.6750,@object | |
.size .L_2il0floatpacket.6750,16 | |
.align 16 | |
# K[6] = 0x923f82a4
.L_2il0floatpacket.6751: | |
.long 0x923f82a4,0x923f82a4,0x923f82a4,0x923f82a4 | |
.type .L_2il0floatpacket.6751,@object | |
.size .L_2il0floatpacket.6751,16 | |
.align 16 | |
# K[7] = 0xab1c5ed5
.L_2il0floatpacket.6752: | |
.long 0xab1c5ed5,0xab1c5ed5,0xab1c5ed5,0xab1c5ed5 | |
.type .L_2il0floatpacket.6752,@object | |
.size .L_2il0floatpacket.6752,16 | |
.align 16 | |
# K[8] = 0xd807aa98
.L_2il0floatpacket.6753: | |
.long 0xd807aa98,0xd807aa98,0xd807aa98,0xd807aa98 | |
.type .L_2il0floatpacket.6753,@object | |
.size .L_2il0floatpacket.6753,16 | |
.align 16 | |
# K[9] = 0x12835b01
.L_2il0floatpacket.6754: | |
.long 0x12835b01,0x12835b01,0x12835b01,0x12835b01 | |
.type .L_2il0floatpacket.6754,@object | |
.size .L_2il0floatpacket.6754,16 | |
.align 16 | |
# K[10] = 0x243185be
.L_2il0floatpacket.6755: | |
.long 0x243185be,0x243185be,0x243185be,0x243185be | |
.type .L_2il0floatpacket.6755,@object | |
.size .L_2il0floatpacket.6755,16 | |
.align 16 | |
# K[11] = 0x550c7dc3
.L_2il0floatpacket.6756: | |
.long 0x550c7dc3,0x550c7dc3,0x550c7dc3,0x550c7dc3 | |
.type .L_2il0floatpacket.6756,@object | |
.size .L_2il0floatpacket.6756,16 | |
.align 16 | |
# K[12] = 0x72be5d74
.L_2il0floatpacket.6757: | |
.long 0x72be5d74,0x72be5d74,0x72be5d74,0x72be5d74 | |
.type .L_2il0floatpacket.6757,@object | |
.size .L_2il0floatpacket.6757,16 | |
.align 16 | |
# K[13] = 0x80deb1fe
.L_2il0floatpacket.6758: | |
.long 0x80deb1fe,0x80deb1fe,0x80deb1fe,0x80deb1fe | |
.type .L_2il0floatpacket.6758,@object | |
.size .L_2il0floatpacket.6758,16 | |
.align 16 | |
# K[14] = 0x9bdc06a7
.L_2il0floatpacket.6759: | |
.long 0x9bdc06a7,0x9bdc06a7,0x9bdc06a7,0x9bdc06a7 | |
.type .L_2il0floatpacket.6759,@object | |
.size .L_2il0floatpacket.6759,16 | |
.align 16 | |
# K[15] = 0xc19bf174
.L_2il0floatpacket.6760: | |
.long 0xc19bf174,0xc19bf174,0xc19bf174,0xc19bf174 | |
.type .L_2il0floatpacket.6760,@object | |
.size .L_2il0floatpacket.6760,16 | |
.align 16 | |
# K[16] = 0xe49b69c1
.L_2il0floatpacket.6761: | |
.long 0xe49b69c1,0xe49b69c1,0xe49b69c1,0xe49b69c1 | |
.type .L_2il0floatpacket.6761,@object | |
.size .L_2il0floatpacket.6761,16 | |
.align 16 | |
# K[17] = 0xefbe4786
.L_2il0floatpacket.6762: | |
.long 0xefbe4786,0xefbe4786,0xefbe4786,0xefbe4786 | |
.type .L_2il0floatpacket.6762,@object | |
.size .L_2il0floatpacket.6762,16 | |
.align 16 | |
# K[18] = 0x0fc19dc6
.L_2il0floatpacket.6763: | |
.long 0x0fc19dc6,0x0fc19dc6,0x0fc19dc6,0x0fc19dc6 | |
.type .L_2il0floatpacket.6763,@object | |
.size .L_2il0floatpacket.6763,16 | |
.align 16 | |
# K[19] = 0x240ca1cc
.L_2il0floatpacket.6764: | |
.long 0x240ca1cc,0x240ca1cc,0x240ca1cc,0x240ca1cc | |
.type .L_2il0floatpacket.6764,@object | |
.size .L_2il0floatpacket.6764,16 | |
.align 16 | |
# K[20] = 0x2de92c6f
.L_2il0floatpacket.6765: | |
.long 0x2de92c6f,0x2de92c6f,0x2de92c6f,0x2de92c6f | |
.type .L_2il0floatpacket.6765,@object | |
.size .L_2il0floatpacket.6765,16 | |
.align 16 | |
# K[21] = 0x4a7484aa
.L_2il0floatpacket.6766: | |
.long 0x4a7484aa,0x4a7484aa,0x4a7484aa,0x4a7484aa | |
.type .L_2il0floatpacket.6766,@object | |
.size .L_2il0floatpacket.6766,16 | |
.align 16 | |
# K[22] = 0x5cb0a9dc
.L_2il0floatpacket.6767: | |
.long 0x5cb0a9dc,0x5cb0a9dc,0x5cb0a9dc,0x5cb0a9dc | |
.type .L_2il0floatpacket.6767,@object | |
.size .L_2il0floatpacket.6767,16 | |
.align 16 | |
# K[23] = 0x76f988da
.L_2il0floatpacket.6768: | |
.long 0x76f988da,0x76f988da,0x76f988da,0x76f988da | |
.type .L_2il0floatpacket.6768,@object | |
.size .L_2il0floatpacket.6768,16 | |
.align 16 | |
# K[24] = 0x983e5152
.L_2il0floatpacket.6769: | |
.long 0x983e5152,0x983e5152,0x983e5152,0x983e5152 | |
.type .L_2il0floatpacket.6769,@object | |
.size .L_2il0floatpacket.6769,16 | |
.align 16 | |
# K[25] = 0xa831c66d
.L_2il0floatpacket.6770: | |
.long 0xa831c66d,0xa831c66d,0xa831c66d,0xa831c66d | |
.type .L_2il0floatpacket.6770,@object | |
.size .L_2il0floatpacket.6770,16 | |
.align 16 | |
# K[26] = 0xb00327c8
.L_2il0floatpacket.6771: | |
.long 0xb00327c8,0xb00327c8,0xb00327c8,0xb00327c8 | |
.type .L_2il0floatpacket.6771,@object | |
.size .L_2il0floatpacket.6771,16 | |
.align 16 | |
# K[27] = 0xbf597fc7
.L_2il0floatpacket.6772: | |
.long 0xbf597fc7,0xbf597fc7,0xbf597fc7,0xbf597fc7 | |
.type .L_2il0floatpacket.6772,@object | |
.size .L_2il0floatpacket.6772,16 | |
.align 16 | |
# K[28] = 0xc6e00bf3
.L_2il0floatpacket.6773: | |
.long 0xc6e00bf3,0xc6e00bf3,0xc6e00bf3,0xc6e00bf3 | |
.type .L_2il0floatpacket.6773,@object | |
.size .L_2il0floatpacket.6773,16 | |
.align 16 | |
.L_2il0floatpacket.6774: | |
.long 0xd5a79147,0xd5a79147,0xd5a79147,0xd5a79147 | |
.type .L_2il0floatpacket.6774,@object | |
.size .L_2il0floatpacket.6774,16 | |
.align 16 | |
.L_2il0floatpacket.6775: | |
.long 0x06ca6351,0x06ca6351,0x06ca6351,0x06ca6351 | |
.type .L_2il0floatpacket.6775,@object | |
.size .L_2il0floatpacket.6775,16 | |
.align 16 | |
.L_2il0floatpacket.6776: | |
.long 0x14292967,0x14292967,0x14292967,0x14292967 | |
.type .L_2il0floatpacket.6776,@object | |
.size .L_2il0floatpacket.6776,16 | |
.align 16 | |
.L_2il0floatpacket.6777: | |
.long 0x27b70a85,0x27b70a85,0x27b70a85,0x27b70a85 | |
.type .L_2il0floatpacket.6777,@object | |
.size .L_2il0floatpacket.6777,16 | |
.align 16 | |
.L_2il0floatpacket.6778: | |
.long 0x2e1b2138,0x2e1b2138,0x2e1b2138,0x2e1b2138 | |
.type .L_2il0floatpacket.6778,@object | |
.size .L_2il0floatpacket.6778,16 | |
.align 16 | |
.L_2il0floatpacket.6779: | |
.long 0x4d2c6dfc,0x4d2c6dfc,0x4d2c6dfc,0x4d2c6dfc | |
.type .L_2il0floatpacket.6779,@object | |
.size .L_2il0floatpacket.6779,16 | |
.align 16 | |
.L_2il0floatpacket.6780: | |
.long 0x53380d13,0x53380d13,0x53380d13,0x53380d13 | |
.type .L_2il0floatpacket.6780,@object | |
.size .L_2il0floatpacket.6780,16 | |
.align 16 | |
.L_2il0floatpacket.6781: | |
.long 0x650a7354,0x650a7354,0x650a7354,0x650a7354 | |
.type .L_2il0floatpacket.6781,@object | |
.size .L_2il0floatpacket.6781,16 | |
.align 16 | |
.L_2il0floatpacket.6782: | |
.long 0x766a0abb,0x766a0abb,0x766a0abb,0x766a0abb | |
.type .L_2il0floatpacket.6782,@object | |
.size .L_2il0floatpacket.6782,16 | |
.align 16 | |
.L_2il0floatpacket.6783: | |
.long 0x81c2c92e,0x81c2c92e,0x81c2c92e,0x81c2c92e | |
.type .L_2il0floatpacket.6783,@object | |
.size .L_2il0floatpacket.6783,16 | |
.align 16 | |
.L_2il0floatpacket.6784: | |
.long 0x92722c85,0x92722c85,0x92722c85,0x92722c85 | |
.type .L_2il0floatpacket.6784,@object | |
.size .L_2il0floatpacket.6784,16 | |
.align 16 | |
.L_2il0floatpacket.6785: | |
.long 0xa2bfe8a1,0xa2bfe8a1,0xa2bfe8a1,0xa2bfe8a1 | |
.type .L_2il0floatpacket.6785,@object | |
.size .L_2il0floatpacket.6785,16 | |
.align 16 | |
.L_2il0floatpacket.6786: | |
.long 0xa81a664b,0xa81a664b,0xa81a664b,0xa81a664b | |
.type .L_2il0floatpacket.6786,@object | |
.size .L_2il0floatpacket.6786,16 | |
.align 16 | |
.L_2il0floatpacket.6787: | |
.long 0xc24b8b70,0xc24b8b70,0xc24b8b70,0xc24b8b70 | |
.type .L_2il0floatpacket.6787,@object | |
.size .L_2il0floatpacket.6787,16 | |
.align 16 | |
.L_2il0floatpacket.6788: | |
.long 0xc76c51a3,0xc76c51a3,0xc76c51a3,0xc76c51a3 | |
.type .L_2il0floatpacket.6788,@object | |
.size .L_2il0floatpacket.6788,16 | |
.align 16 | |
.L_2il0floatpacket.6789: | |
.long 0xd192e819,0xd192e819,0xd192e819,0xd192e819 | |
.type .L_2il0floatpacket.6789,@object | |
.size .L_2il0floatpacket.6789,16 | |
.align 16 | |
.L_2il0floatpacket.6790: | |
.long 0xd6990624,0xd6990624,0xd6990624,0xd6990624 | |
.type .L_2il0floatpacket.6790,@object | |
.size .L_2il0floatpacket.6790,16 | |
.align 16 | |
.L_2il0floatpacket.6791: | |
.long 0xf40e3585,0xf40e3585,0xf40e3585,0xf40e3585 | |
.type .L_2il0floatpacket.6791,@object | |
.size .L_2il0floatpacket.6791,16 | |
.align 16 | |
.L_2il0floatpacket.6792: | |
.long 0x106aa070,0x106aa070,0x106aa070,0x106aa070 | |
.type .L_2il0floatpacket.6792,@object | |
.size .L_2il0floatpacket.6792,16 | |
.align 16 | |
.L_2il0floatpacket.6793: | |
.long 0x19a4c116,0x19a4c116,0x19a4c116,0x19a4c116 | |
.type .L_2il0floatpacket.6793,@object | |
.size .L_2il0floatpacket.6793,16 | |
.align 16 | |
.L_2il0floatpacket.6794: | |
.long 0x1e376c08,0x1e376c08,0x1e376c08,0x1e376c08 | |
.type .L_2il0floatpacket.6794,@object | |
.size .L_2il0floatpacket.6794,16 | |
.align 16 | |
.L_2il0floatpacket.6795: | |
.long 0x2748774c,0x2748774c,0x2748774c,0x2748774c | |
.type .L_2il0floatpacket.6795,@object | |
.size .L_2il0floatpacket.6795,16 | |
.align 16 | |
.L_2il0floatpacket.6796: | |
.long 0x34b0bcb5,0x34b0bcb5,0x34b0bcb5,0x34b0bcb5 | |
.type .L_2il0floatpacket.6796,@object | |
.size .L_2il0floatpacket.6796,16 | |
.align 16 | |
.L_2il0floatpacket.6797: | |
.long 0x391c0cb3,0x391c0cb3,0x391c0cb3,0x391c0cb3 | |
.type .L_2il0floatpacket.6797,@object | |
.size .L_2il0floatpacket.6797,16 | |
.align 16 | |
.L_2il0floatpacket.6798: | |
.long 0x4ed8aa4a,0x4ed8aa4a,0x4ed8aa4a,0x4ed8aa4a | |
.type .L_2il0floatpacket.6798,@object | |
.size .L_2il0floatpacket.6798,16 | |
.align 16 | |
.L_2il0floatpacket.6799: | |
.long 0x5b9cca4f,0x5b9cca4f,0x5b9cca4f,0x5b9cca4f | |
.type .L_2il0floatpacket.6799,@object | |
.size .L_2il0floatpacket.6799,16 | |
.align 16 | |
.L_2il0floatpacket.6800: | |
.long 0x682e6ff3,0x682e6ff3,0x682e6ff3,0x682e6ff3 | |
.type .L_2il0floatpacket.6800,@object | |
.size .L_2il0floatpacket.6800,16 | |
.align 16 | |
.L_2il0floatpacket.6801: | |
.long 0x748f82ee,0x748f82ee,0x748f82ee,0x748f82ee | |
.type .L_2il0floatpacket.6801,@object | |
.size .L_2il0floatpacket.6801,16 | |
.align 16 | |
.L_2il0floatpacket.6802: | |
.long 0x78a5636f,0x78a5636f,0x78a5636f,0x78a5636f | |
.type .L_2il0floatpacket.6802,@object | |
.size .L_2il0floatpacket.6802,16 | |
.align 16 | |
.L_2il0floatpacket.6803: | |
.long 0x84c87814,0x84c87814,0x84c87814,0x84c87814 | |
.type .L_2il0floatpacket.6803,@object | |
.size .L_2il0floatpacket.6803,16 | |
.align 16 | |
.L_2il0floatpacket.6804: | |
.long 0x8cc70208,0x8cc70208,0x8cc70208,0x8cc70208 | |
.type .L_2il0floatpacket.6804,@object | |
.size .L_2il0floatpacket.6804,16 | |
.align 16 | |
.L_2il0floatpacket.6805: | |
.long 0x90befffa,0x90befffa,0x90befffa,0x90befffa | |
.type .L_2il0floatpacket.6805,@object | |
.size .L_2il0floatpacket.6805,16 | |
.align 16 | |
.L_2il0floatpacket.6806: | |
.long 0xa4506ceb,0xa4506ceb,0xa4506ceb,0xa4506ceb | |
.type .L_2il0floatpacket.6806,@object | |
.size .L_2il0floatpacket.6806,16 | |
.align 16 | |
.L_2il0floatpacket.6807: | |
.long 0xbef9a3f7,0xbef9a3f7,0xbef9a3f7,0xbef9a3f7 | |
.type .L_2il0floatpacket.6807,@object | |
.size .L_2il0floatpacket.6807,16 | |
.align 16 | |
.L_2il0floatpacket.6808: | |
.long 0xc67178f2,0xc67178f2,0xc67178f2,0xc67178f2 | |
.type .L_2il0floatpacket.6808,@object | |
.size .L_2il0floatpacket.6808,16 | |
# ----------------------------------------------------------------------------
# pSHA256InitState: the eight SHA-256 initial hash values H0..H7
# (FIPS 180-4, sec. 5.3.3), stored scalar (not broadcast) as decimal words:
#   0x6a09e667 0xbb67ae85 0x3c6ef372 0xa54ff53a
#   0x510e527f 0x9b05688c 0x1f83d9ab 0x5be0cd19
# Spec-defined constants — do not edit by hand.
# ----------------------------------------------------------------------------
.align 4 | |
pSHA256InitState: | |
.long 1779033703 | |
.long 3144134277 | |
.long 1013904242 | |
.long 2773480762 | |
.long 1359893119 | |
.long 2600822924 | |
.long 528734635 | |
.long 1541459225 | |
.type pSHA256InitState,@object | |
.size pSHA256InitState,32 | |
.data | |
# Empty .note.GNU-stack section: marks the object as not needing an
# executable stack (standard GNU toolchain convention).
.section .note.GNU-stack, "" | |
# ----------------------------------------------------------------------------
# DWARF call-frame information (.eh_frame), emitted by icc as raw packed
# words rather than .cfi_* directives: one CIE followed by two FDEs, keyed to
# the ..___tag_value_* labels planted in the function bodies above. The CFI
# opcodes (DW_CFA_*) are byte-packed into .byte/.2byte/.4byte/.8byte data;
# every value and its order are significant for stack unwinding.
# Machine-generated — do not edit by hand.
# ----------------------------------------------------------------------------
// -- Begin DWARF2 SEGMENT .eh_frame | |
.section .eh_frame,"a",@progbits | |
.eh_frame_seg: | |
.align 8 | |
# CIE (length 0x14): version/augmentation header plus initial CFA rule.
.4byte 0x00000014 | |
.8byte 0x7801000100000000 | |
.8byte 0x0000019008070c10 | |
.4byte 0x00000000 | |
# FDE #1 (length 0x15c): covers ScanHash_4WaySSE2 from tag .1 to tag .32;
# advance_loc/CFA-offset pairs track the rbp frame and the pushes of
# r12-r15/rbx seen in the prologue, then their restoration.
.4byte 0x0000015c | |
.4byte 0x0000001c | |
.8byte ..___tag_value_ScanHash_4WaySSE2.1 | |
.8byte ..___tag_value_ScanHash_4WaySSE2.32-..___tag_value_ScanHash_4WaySSE2.1 | |
.byte 0x04 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.3-..___tag_value_ScanHash_4WaySSE2.1 | |
.4byte 0x0410070c | |
.4byte ..___tag_value_ScanHash_4WaySSE2.4-..___tag_value_ScanHash_4WaySSE2.3 | |
.4byte 0x8610060c | |
.2byte 0x0402 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.6-..___tag_value_ScanHash_4WaySSE2.4 | |
.8byte 0xff800d1c380e0c10 | |
.8byte 0xfffffff80d1affff | |
.2byte 0x0422 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.7-..___tag_value_ScanHash_4WaySSE2.6 | |
.8byte 0xff800d1c380e0d10 | |
.8byte 0xfffffff00d1affff | |
.2byte 0x0422 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.8-..___tag_value_ScanHash_4WaySSE2.7 | |
.8byte 0xff800d1c380e0e10 | |
.8byte 0xffffffe80d1affff | |
.2byte 0x0422 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.9-..___tag_value_ScanHash_4WaySSE2.8 | |
.8byte 0xff800d1c380e0f10 | |
.8byte 0xffffffe00d1affff | |
.2byte 0x0422 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.10-..___tag_value_ScanHash_4WaySSE2.9 | |
.8byte 0xff800d1c380e0310 | |
.8byte 0xffffffd80d1affff | |
.2byte 0x0422 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.11-..___tag_value_ScanHash_4WaySSE2.10 | |
.4byte 0x04030309 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.12-..___tag_value_ScanHash_4WaySSE2.11 | |
.4byte 0x040f0f09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.13-..___tag_value_ScanHash_4WaySSE2.12 | |
.4byte 0x040e0e09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.14-..___tag_value_ScanHash_4WaySSE2.13 | |
.4byte 0x040d0d09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.15-..___tag_value_ScanHash_4WaySSE2.14 | |
.4byte 0x040c0c09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.16-..___tag_value_ScanHash_4WaySSE2.15 | |
.4byte 0x0908070c | |
.2byte 0x0606 | |
.byte 0x04 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.18-..___tag_value_ScanHash_4WaySSE2.16 | |
.8byte 0x1c380e031010060c | |
.8byte 0xd80d1affffff800d | |
.8byte 0x0c10028622ffffff | |
.8byte 0xffffff800d1c380e | |
.8byte 0x1022fffffff80d1a | |
.8byte 0xffff800d1c380e0d | |
.8byte 0x22fffffff00d1aff | |
.8byte 0xff800d1c380e0e10 | |
.8byte 0xffffffe80d1affff | |
.8byte 0x800d1c380e0f1022 | |
.8byte 0xffffe00d1affffff | |
.2byte 0x22ff | |
.byte 0x04 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.25-..___tag_value_ScanHash_4WaySSE2.18 | |
.4byte 0x04030309 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.26-..___tag_value_ScanHash_4WaySSE2.25 | |
.4byte 0x040f0f09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.27-..___tag_value_ScanHash_4WaySSE2.26 | |
.4byte 0x040e0e09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.28-..___tag_value_ScanHash_4WaySSE2.27 | |
.4byte 0x040d0d09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.29-..___tag_value_ScanHash_4WaySSE2.28 | |
.4byte 0x040c0c09 | |
.4byte ..___tag_value_ScanHash_4WaySSE2.30-..___tag_value_ScanHash_4WaySSE2.29 | |
.8byte 0x000006060908070c | |
.byte 0x00 | |
# FDE #2 (length 0xa4): covers DoubleBlockSHA256 from tag .33 to tag .61;
# same pattern of save/restore CFI records for its prologue/epilogue.
.4byte 0x000000a4 | |
.4byte 0x0000017c | |
.8byte ..___tag_value_DoubleBlockSHA256.33 | |
.8byte ..___tag_value_DoubleBlockSHA256.61-..___tag_value_DoubleBlockSHA256.33 | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.35-..___tag_value_DoubleBlockSHA256.33 | |
.4byte 0x070c028c | |
.2byte 0x0410 | |
.4byte ..___tag_value_DoubleBlockSHA256.37-..___tag_value_DoubleBlockSHA256.35 | |
.4byte 0x070c038d | |
.2byte 0x0418 | |
.4byte ..___tag_value_DoubleBlockSHA256.39-..___tag_value_DoubleBlockSHA256.37 | |
.4byte 0x070c048e | |
.2byte 0x0420 | |
.4byte ..___tag_value_DoubleBlockSHA256.41-..___tag_value_DoubleBlockSHA256.39 | |
.4byte 0x070c058f | |
.2byte 0x0428 | |
.4byte ..___tag_value_DoubleBlockSHA256.43-..___tag_value_DoubleBlockSHA256.41 | |
.4byte 0x070c0683 | |
.2byte 0x0430 | |
.4byte ..___tag_value_DoubleBlockSHA256.45-..___tag_value_DoubleBlockSHA256.43 | |
.4byte 0x070c0786 | |
.2byte 0x0438 | |
.4byte ..___tag_value_DoubleBlockSHA256.47-..___tag_value_DoubleBlockSHA256.45 | |
.4byte 0x29c0070c | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.48-..___tag_value_DoubleBlockSHA256.47 | |
.4byte 0x0938070c | |
.2byte 0x0606 | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.50-..___tag_value_DoubleBlockSHA256.48 | |
.4byte 0x0930070c | |
.2byte 0x0303 | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.52-..___tag_value_DoubleBlockSHA256.50 | |
.4byte 0x0928070c | |
.2byte 0x0f0f | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.54-..___tag_value_DoubleBlockSHA256.52 | |
.4byte 0x0920070c | |
.2byte 0x0e0e | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.56-..___tag_value_DoubleBlockSHA256.54 | |
.4byte 0x0918070c | |
.2byte 0x0d0d | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.58-..___tag_value_DoubleBlockSHA256.56 | |
.4byte 0x0910070c | |
.2byte 0x0c0c | |
.byte 0x04 | |
.4byte ..___tag_value_DoubleBlockSHA256.60-..___tag_value_DoubleBlockSHA256.58 | |
.4byte 0x0008070c | |
# End |
# NOTE(review): the four lines that followed here ("Sign up for free / to
# join this conversation on GitHub. / Already have an account? / Sign in to
# comment") were GitHub web-page footer text accidentally captured when this
# listing was pasted; they are not part of the compiler output and would not
# assemble, so they are preserved only as this comment.