author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 18:50:03 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 18:50:03 +0000
commit     01a69402cf9d38ff180345d55c2ee51c7e89fbc7 (patch)
tree       b406c5242a088c4f59c6e4b719b783f43aca6ae9 /arch/x86/crypto/sm4-aesni-avx2-asm_64.S
parent     Adding upstream version 6.7.12. (diff)
Adding upstream version 6.8.9. (upstream/6.8.9)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'arch/x86/crypto/sm4-aesni-avx2-asm_64.S')
-rw-r--r--  arch/x86/crypto/sm4-aesni-avx2-asm_64.S  |  55
1 file changed, 0 insertions(+), 55 deletions(-)
diff --git a/arch/x86/crypto/sm4-aesni-avx2-asm_64.S b/arch/x86/crypto/sm4-aesni-avx2-asm_64.S
index 98ede94592..9ff5ba0755 100644
--- a/arch/x86/crypto/sm4-aesni-avx2-asm_64.S
+++ b/arch/x86/crypto/sm4-aesni-avx2-asm_64.S
@@ -439,58 +439,3 @@ SYM_TYPED_FUNC_START(sm4_aesni_avx2_cbc_dec_blk16)
FRAME_END
RET;
SYM_FUNC_END(sm4_aesni_avx2_cbc_dec_blk16)
-
-/*
- * void sm4_aesni_avx2_cfb_dec_blk16(const u32 *rk, u8 *dst,
- * const u8 *src, u8 *iv)
- */
-SYM_TYPED_FUNC_START(sm4_aesni_avx2_cfb_dec_blk16)
- /* input:
- * %rdi: round key array, CTX
- * %rsi: dst (16 blocks)
- * %rdx: src (16 blocks)
- * %rcx: iv
- */
- FRAME_BEGIN
-
- vzeroupper;
-
- /* Load input */
- vmovdqu (%rcx), RNOTx;
- vinserti128 $1, (%rdx), RNOT, RA0;
- vmovdqu (0 * 32 + 16)(%rdx), RA1;
- vmovdqu (1 * 32 + 16)(%rdx), RA2;
- vmovdqu (2 * 32 + 16)(%rdx), RA3;
- vmovdqu (3 * 32 + 16)(%rdx), RB0;
- vmovdqu (4 * 32 + 16)(%rdx), RB1;
- vmovdqu (5 * 32 + 16)(%rdx), RB2;
- vmovdqu (6 * 32 + 16)(%rdx), RB3;
-
- /* Update IV */
- vmovdqu (7 * 32 + 16)(%rdx), RNOTx;
- vmovdqu RNOTx, (%rcx);
-
- call __sm4_crypt_blk16;
-
- vpxor (0 * 32)(%rdx), RA0, RA0;
- vpxor (1 * 32)(%rdx), RA1, RA1;
- vpxor (2 * 32)(%rdx), RA2, RA2;
- vpxor (3 * 32)(%rdx), RA3, RA3;
- vpxor (4 * 32)(%rdx), RB0, RB0;
- vpxor (5 * 32)(%rdx), RB1, RB1;
- vpxor (6 * 32)(%rdx), RB2, RB2;
- vpxor (7 * 32)(%rdx), RB3, RB3;
-
- vmovdqu RA0, (0 * 32)(%rsi);
- vmovdqu RA1, (1 * 32)(%rsi);
- vmovdqu RA2, (2 * 32)(%rsi);
- vmovdqu RA3, (3 * 32)(%rsi);
- vmovdqu RB0, (4 * 32)(%rsi);
- vmovdqu RB1, (5 * 32)(%rsi);
- vmovdqu RB2, (6 * 32)(%rsi);
- vmovdqu RB3, (7 * 32)(%rsi);
-
- vzeroall;
- FRAME_END
- RET;
-SYM_FUNC_END(sm4_aesni_avx2_cfb_dec_blk16)
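
For reference, the deleted routine performs standard CFB decryption over 16 consecutive
16-byte blocks: each plaintext block is recovered as P[i] = E_k(C[i-1]) ^ C[i], with the
IV standing in for C[-1], and the IV is then advanced to the last ciphertext block. The
assembly gains its speed by loading the IV plus the first 15 ciphertext blocks up front
and running all 16 block encryptions in one call to __sm4_crypt_blk16, which is possible
because CFB decryption, unlike CFB encryption, has no chaining dependency between blocks.
Below is a minimal, unoptimized C sketch of the same operation; sm4_encrypt_block() is a
hypothetical stand-in for a single-block SM4 encryption primitive and is not part of this
diff.

#include <stdint.h>
#include <string.h>

#define SM4_BLOCK_SIZE 16

/* Hypothetical single-block SM4 encryption helper (not defined in this diff). */
void sm4_encrypt_block(const uint32_t *rk, uint8_t *dst, const uint8_t *src);

static void sm4_cfb_dec_blk16_ref(const uint32_t *rk, uint8_t *dst,
				  const uint8_t *src, uint8_t *iv)
{
	uint8_t prev[SM4_BLOCK_SIZE];	/* C[i-1]; starts out as the IV */
	uint8_t ks[SM4_BLOCK_SIZE];	/* keystream block E_k(C[i-1]) */
	uint8_t cblk[SM4_BLOCK_SIZE];
	int i, j;

	memcpy(prev, iv, SM4_BLOCK_SIZE);

	for (i = 0; i < 16; i++) {
		/* Copy C[i] first so in-place operation (dst == src) works. */
		memcpy(cblk, src + i * SM4_BLOCK_SIZE, SM4_BLOCK_SIZE);

		/* CFB decryption: P[i] = E_k(C[i-1]) ^ C[i] */
		sm4_encrypt_block(rk, ks, prev);
		for (j = 0; j < SM4_BLOCK_SIZE; j++)
			dst[i * SM4_BLOCK_SIZE + j] = ks[j] ^ cblk[j];

		memcpy(prev, cblk, SM4_BLOCK_SIZE);
	}

	/* As in the removed asm, the IV is advanced to the last ciphertext block. */
	memcpy(iv, prev, SM4_BLOCK_SIZE);
}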