Home
last modified time | relevance | path

Searched refs: xmm4 (Results 1 – 25 of 37) sorted by relevance

1 2

/linux/lib/crypto/x86/
H A D blake2s-core.S 92 movd (DATA,%rax,4),%xmm4
99 punpckldq %xmm5,%xmm4
101 punpcklqdq %xmm6,%xmm4
102 paddd %xmm4,%xmm0
119 movd (DATA,%rax,4),%xmm4
121 punpckldq %xmm4,%xmm7
141 movd (DATA,%rax,4),%xmm4
145 punpckldq %xmm5,%xmm4
146 punpcklqdq %xmm4,%xmm6
160 movd (DATA,%rax,4),%xmm4
[all …]
H A D chacha-ssse3-x86_64.S 38 movdqa ROT8(%rip),%xmm4
58 pshufb %xmm4,%xmm3
91 pshufb %xmm4,%xmm3
139 movdqu 0x00(%rdx),%xmm4
140 pxor %xmm4,%xmm0
253 pshufd $0x00,%xmm5,%xmm4
286 paddd %xmm4,%xmm0
311 pxor %xmm8,%xmm4
312 movdqa %xmm4,%xmm0
314 psrld $20,%xmm4
[all …]
H A D sha512-avx-asm.S 172 vmovdqa W_t(idx), %xmm4 # XMM4 = W[t-2]
176 vpsrlq $61, %xmm4, %xmm0 # XMM0 = W[t-2]>>61
181 vpsrlq $19, %xmm4, %xmm1 # XMM1 = W[t-2]>>19
190 vpsrlq $6, %xmm4, %xmm2 # XMM2 = W[t-2]>>6
200 vpsllq $(64-61), %xmm4, %xmm3 # XMM3 = W[t-2]<<3
217 vpsllq $(64-19), %xmm4, %xmm4 # XMM4 = W[t-2]<<25
220 vpxor %xmm4, %xmm0, %xmm0 # XMM0 = W[t-2]>>61 ^ W[t-2]>>19 ^
H A D polyval-pclmul-avx.S 95 vpclmulqdq $0x11, (16*\i)(KEY_POWERS), %xmm0, %xmm4
98 vpxor %xmm4, HI, HI
H A D sha512-ssse3-asm.S 218 movdqa %xmm5, %xmm4 # XMM4 = W[t-15]
225 psllq $(64-1)-(64-8), %xmm4 # XMM4 = W[t-15] << 7
231 pxor %xmm5, %xmm4 # XMM4 = (W[t-15]<<7)^W[t-15]
237 psllq $(64-8), %xmm4 # XMM4 = ((W[t-15]<<7)^W[t-15])<<56
246 pxor %xmm4, %xmm3 # XMM3 = s0(W[t-15])
H A D sha1-ni-asm.S 66 #define MSG1 %xmm4
H A D nh-sse2.S 16 #define K0 %xmm4
H A D nh-avx2.S 17 #define K0_XMM %xmm4
H A D sha256-ni-asm.S 68 #define MSG1 %xmm4
206 #define STATE1_B %xmm4
H A D chacha-avx512vl-x86_64.S 385 vextracti128 $1,%ymm10,%xmm4
409 vmovdqa %xmm4,%xmm10
/linux/arch/x86/crypto/
H A D aria-aesni-avx-asm_64.S 889 inpack16_post(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
893 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
895 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
899 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
901 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
905 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
907 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
911 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
913 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
917 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
[all …]
H A D camellia-aesni-avx-asm_64.S 193 roundsm16(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
201 roundsm16(%xmm4, %xmm5, %xmm6, %xmm7, %xmm0, %xmm1, %xmm2, %xmm3,
729 inpack16_post(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
733 enc_rounds16(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
737 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
745 enc_rounds16(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
749 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
757 enc_rounds16(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
776 outunpack16(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
787 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
[all …]
H A D aesni-intel_asm.S 24 #define STATE2 %xmm4
70 shufps $0b00010000, %xmm0, %xmm4
71 pxor %xmm4, %xmm0
72 shufps $0b10001100, %xmm0, %xmm4
73 pxor %xmm4, %xmm0
83 shufps $0b00010000, %xmm0, %xmm4
84 pxor %xmm4, %xmm0
85 shufps $0b10001100, %xmm0, %xmm4
86 pxor %xmm4, %xmm0
107 shufps $0b00010000, %xmm0, %xmm4
[all …]
H A D aes-gcm-aesni-x86_64.S 504 .set H_POW1_X64, %xmm4 // H^1 * x^64
585 .set H_POW1, %xmm4 // H^1
711 .set MI, %xmm4 // Middle part of unreduced product
1016 .set GHASH_ACC, %xmm4
H A D aes-gcm-vaes-avx512.S 297 .set H_INC_XMM, %xmm4
517 .set BSWAP_MASK_XMM, %xmm4
723 .set GHASHDATA0_XMM, %xmm4
1052 .set GFPOLY, %xmm4
H A D ghash-clmulni-intel_asm.S 28 #define T3 %xmm4
H A D aes-gcm-vaes-avx2.S 516 .set MI_XMM, %xmm4
725 .set TMP2_XMM, %xmm4
1035 .set GFPOLY, %xmm4
H A D cast6-avx-x86_64-asm_64.S 42 #define RA2 %xmm4
H A D twofish-avx-x86_64-asm_64.S 42 #define RA2 %xmm4
H A D sm4-aesni-avx2-asm_64.S 53 #define RTMP1x %xmm4
H A D cast5-avx-x86_64-asm_64.S 41 #define RL3 %xmm4
H A D sm4-aesni-avx-asm_64.S 26 #define RTMP1 %xmm4
H A D aegis128-aesni-asm.S 16 #define STATE4 %xmm4
/linux/lib/crc/x86/
H A D crc-pclmul-template.S 391 _fold_vec_final 16, %xmm0, %xmm1, CONSTS_XMM, BSWAP_MASK_XMM, %xmm4, %xmm5
427 %xmm0, %xmm0, unaligned_mem_tmp=%xmm4
433 movdqa %xmm0, %xmm4
436 movdqa %xmm4, %xmm0
444 _fold_vec %xmm0, %xmm1, CONSTS_XMM, %xmm4
/linux/arch/x86/entry/vdso/vdso64/
H A D vgetrandom-chacha.S 36 .set state3, %xmm4

1 2