author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:54:28 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-21 11:54:28 +0000
commit     e6918187568dbd01842d8d1d2c808ce16a894239 (patch)
tree       64f88b554b444a49f656b6c656111a145cbbaa28 /src/crypto/isa-l/isa-l_crypto/aes/aarch64
parent     Initial commit. (diff)
Adding upstream version 18.2.2. (upstream/18.2.2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/crypto/isa-l/isa-l_crypto/aes/aarch64')
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_128.S  215
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_256.S  220
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_init.S  161
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_consts.S  140
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_128.S  30
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_256.S  30
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_128.S  30
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_256.S  30
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_128.S  32
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_256.S  32
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_aarch64_dispatcher.c  108
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_common.S  54
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_dec_aes.S  482
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_enc_aes.S  157
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_multibinary_aarch64.S  38
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_aarch64_dispatcher.c  255
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common.S  430
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_128.S  165
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_256.S  181
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_enc_dec.S  588
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_multibinary_aarch64.S  58
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_precomp.S  83
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_update.S  277
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_128_aarch64_aes.S  134
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_192_aarch64_aes.S  136
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_256_aarch64_aes.S  153
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_aarch64_dispatcher.c  72
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_multibinary_aarch64.S  35
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aarch64_dispatcher.c  102
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_common.S  214
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_dec.S  116
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_enc.S  91
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_common.S  247
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_dec.S  116
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_enc.S  88
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_common.S  232
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_dec.S  49
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_enc.S  49
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_dec.S  49
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_enc.S  49
-rw-r--r--  src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_multibinary_aarch64.S  39
41 files changed, 5767 insertions, 0 deletions
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_128.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_128.S
new file mode 100644
index 000000000..7214f0f25
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_128.S
@@ -0,0 +1,215 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "gcm_common_128.S"
+/*
+ void gist_aes_gcm_enc_finalize_##mode( \
+ const struct gcm_key_data *key_data, \
+ struct gcm_context_data *context, \
+ uint8_t *auth_tag, \
+ uint64_t auth_tag_len \
+ )
+*/
+ declare_var_generic_reg key_data ,0
+ declare_var_generic_reg context ,1
+ declare_var_generic_reg auth_tag ,2
+ declare_var_generic_reg auth_tag_len ,3
+ declare_var_generic_reg partial_block_len ,4
+ declare_var_generic_reg partial_block ,1
+
+ declare_var_generic_reg hashkey_addr ,0
+ declare_var_generic_reg temp0, 6
+
+ declare_var_vector_reg OrigIV ,0
+ declare_var_vector_reg AadHash ,1
+ declare_var_vector_reg HashKey0 ,2
+ declare_var_vector_reg HashKey0Ext ,3
+ declare_var_vector_reg High ,4
+ declare_var_vector_reg Low ,5
+ declare_var_vector_reg Middle0 ,6
+ declare_var_vector_reg Len ,7
+ declare_var_vector_reg Tmp0 ,8
+ declare_var_vector_reg Tmp1 ,9
+ declare_var_vector_reg Zero ,10
+ declare_var_vector_reg Poly ,11
+ declare_var_vector_reg PartitialBlock ,13
+
+ declare_var_vector_reg Tmp2 ,31
+ declare_var_vector_reg Tmp3 ,12
+
+ .set stack_size,48
+ .macro push_stack
+ stp d8, d9,[sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ .endm
+
+ .macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d8, d9, [sp], stack_size
+ .endm
+START_FUNC(enc,KEY_LEN,_finalize_)
+START_FUNC(dec,KEY_LEN,_finalize_)
+ ldr partial_block_len,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ load_aes_keys key_data
+ push_stack
+ /* Init Consts for ghash */
+ movi vZero.4s,0
+ mov temp0,0x87
+ dup vPoly.2d,temp0
+ ldr qOrigIV,[context,ORIG_IV_OFF] /* OrigIV */
+ ldp qAadHash,qLen,[context],PARTIAL_BLOCK_ENC_KEY_OFF /* Len , context move to partial block*/
+ /* Init Consts for ghash */
+ movi vZero.4s,0
+ mov temp0,0x87
+ dup vPoly.2d,temp0
+ /* complete part */
+ cbnz partial_block_len,10f
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-2)*32]
+ aes_encrypt_round OrigIV,Key0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ shl vLen.2d,vLen.2d,3 /* Len */
+ aes_encrypt_round OrigIV,Key2
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ rev64 vLen.16b,vLen.16b /* Len */
+ aes_encrypt_round OrigIV,Key3
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ rbit vAadHash.16b,vLen.16b /* Len */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-1)*32]
+ aes_encrypt_round OrigIV,Key4
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aes_encrypt_round OrigIV,Key5
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key6
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round OrigIV,Key7
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ eor vLow.16b ,vLow.16b ,vTmp1.16b
+ pmull2 vTmp2.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round OrigIV,Key8
+ pmull vTmp3.1q ,vAadHash.1d,vHashKey0Ext.1d
+ aese vOrigIV.16b,vKey9.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b
+ eor vOrigIV.16b,vOrigIV.16b,vKey10.16b
+ rbit vAadHash.16b,vOrigIV.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+
+10: /* cbnz partial_block_len,10f */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-3)*32]
+ aes_encrypt_round OrigIV,Key0
+ read_small_data_start PartitialBlock,partial_block,partial_block_len,temp0,Tmp0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round OrigIV,Key2
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round OrigIV,Key3
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round OrigIV,Key4
+ rbit vAadHash.16b,vPartitialBlock.16b
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-2)*32]
+ aes_encrypt_round OrigIV,Key5
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ pmull2 vTmp0.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key6
+ shl vLen.2d,vLen.2d,3 /* Len */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ aes_encrypt_round OrigIV,Key7
+ eor vLow.16b,vLow.16b,vTmp1.16b
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ rev64 vLen.16b,vLen.16b /* Len */
+ aes_encrypt_round OrigIV,Key8
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aese vOrigIV.16b,vKey9.16b
+ pmull vTmp0.1q,vAadHash.1d,vHashKey0Ext.1d
+ rbit vAadHash.16b,vLen.16b /* Len */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-1)*32]
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ eor vOrigIV.16b,vOrigIV.16b,vKey10.16b
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ eor vLow.16b ,vLow.16b ,vTmp1.16b
+ pmull2 vTmp2.1q ,vAadHash.2d,vHashKey0Ext.2d
+ pmull vTmp3.1q ,vAadHash.1d,vHashKey0Ext.1d
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b
+ rbit vAadHash.16b,vOrigIV.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+
+END_FUNC(enc,KEY_LEN,_finalize_)
+END_FUNC(dec,KEY_LEN,_finalize_)
+
+
+
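For orientation, the finalize routine above is the last step of ISA-L's three-call GCM flow (init, update, finalize). Below is a minimal C sketch of that flow, assuming the public prototypes declared in isa-l_crypto's aes_gcm.h; the key, IV and buffer handling is illustrative only and not part of this patch.

    #include <stdint.h>
    #include "aes_gcm.h"        /* ISA-L crypto public GCM interface */

    /* One-shot AES-128-GCM encryption: expand keys, then init/update/finalize.
     * The finalize call ends up in the assembly above and writes an 8-, 12- or
     * 16-byte tag, matching the auth_tag_len cases handled there. */
    static void gcm128_encrypt_sketch(const uint8_t key[16], uint8_t iv[12],
                                      const uint8_t *aad, uint64_t aad_len,
                                      const uint8_t *pt, uint8_t *ct, uint64_t len,
                                      uint8_t tag[16])
    {
            struct gcm_key_data key_data;
            struct gcm_context_data ctx;

            aes_gcm_pre_128(key, &key_data);                      /* round keys + hash-key powers */
            aes_gcm_init_128(&key_data, &ctx, iv, aad, aad_len);  /* 12-byte IV, hashes the AAD */
            aes_gcm_enc_128_update(&key_data, &ctx, ct, pt, len); /* may be called repeatedly */
            aes_gcm_enc_128_finalize(&key_data, &ctx, tag, 16);   /* routine implemented above */
    }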
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_256.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_256.S
new file mode 100644
index 000000000..9eda7178e
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_finalize_256.S
@@ -0,0 +1,220 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "gcm_common_256.S"
+/*
+ void gist_aes_gcm_enc_finalize_##mode( \
+ const struct gcm_key_data *key_data, \
+ struct gcm_context_data *context, \
+ uint8_t *auth_tag, \
+ uint64_t auth_tag_len \
+ )
+*/
+ declare_var_generic_reg key_data ,0
+ declare_var_generic_reg context ,1
+ declare_var_generic_reg auth_tag ,2
+ declare_var_generic_reg auth_tag_len ,3
+ declare_var_generic_reg partial_block_len ,4
+ declare_var_generic_reg partial_block ,1
+
+ declare_var_generic_reg hashkey_addr ,0
+ declare_var_generic_reg temp0 ,6
+
+ declare_var_vector_reg OrigIV ,0
+ declare_var_vector_reg AadHash ,1
+ declare_var_vector_reg HashKey0 ,2
+ declare_var_vector_reg HashKey0Ext ,3
+ declare_var_vector_reg High ,4
+ declare_var_vector_reg Low ,5
+ declare_var_vector_reg Middle0 ,6
+ declare_var_vector_reg Len ,7
+ declare_var_vector_reg Tmp0 ,8
+ declare_var_vector_reg Tmp1 ,9
+ declare_var_vector_reg Zero ,10
+ declare_var_vector_reg Poly ,11
+ declare_var_vector_reg PartitialBlock ,13
+
+ declare_var_vector_reg Tmp2 ,31
+ declare_var_vector_reg Tmp3 ,12
+
+ .set stack_size,48
+ .macro push_stack
+ stp d8, d9,[sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ .endm
+ .macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d8, d9, [sp], stack_size
+ .endm
+
+START_FUNC(enc,KEY_LEN,_finalize_)
+START_FUNC(dec,KEY_LEN,_finalize_)
+ ldr partial_block_len,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ load_aes_keys key_data
+ push_stack
+
+ ldr qOrigIV,[context,ORIG_IV_OFF] /* OrigIV */
+ ldp qAadHash,qLen,[context],PARTIAL_BLOCK_ENC_KEY_OFF /* Len , context move to partial block*/
+ /* Init Consts for ghash */
+ movi vZero.4s,0
+ mov temp0,0x87
+ dup vPoly.2d,temp0
+ /* complete part */
+ cbnz partial_block_len,10f
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-2)*32]
+ aes_encrypt_round OrigIV,Key0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ shl vLen.2d,vLen.2d,3 /* Len */
+ aes_encrypt_round OrigIV,Key2
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ rev64 vLen.16b,vLen.16b /* Len */
+ aes_encrypt_round OrigIV,Key3
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ rbit vAadHash.16b,vLen.16b /* Len */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-1)*32]
+ aes_encrypt_round OrigIV,Key4
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aes_encrypt_round OrigIV,Key5
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key6
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round OrigIV,Key7
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ eor vLow.16b ,vLow.16b ,vTmp1.16b
+ pmull2 vTmp2.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round OrigIV,Key8
+ pmull vTmp3.1q ,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round OrigIV,Key9
+ aes_encrypt_round OrigIV,Key10
+ aes_encrypt_round OrigIV,Key11
+ aes_encrypt_round OrigIV,Key12
+ aese vOrigIV.16b,vKey13.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b
+ eor vOrigIV.16b,vOrigIV.16b,vKey14.16b
+ rbit vAadHash.16b,vOrigIV.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+
+10: /* cbnz partial_block_len,10f */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-3)*32]
+ aes_encrypt_round OrigIV,Key0
+ read_small_data_start PartitialBlock,partial_block,partial_block_len,temp0,Tmp0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round OrigIV,Key2
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round OrigIV,Key3
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round OrigIV,Key4
+ rbit vAadHash.16b,vPartitialBlock.16b
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-2)*32]
+ aes_encrypt_round OrigIV,Key5
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ pmull2 vTmp0.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key6
+ shl vLen.2d,vLen.2d,3 /* Len */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ aes_encrypt_round OrigIV,Key7
+ eor vLow.16b,vLow.16b,vTmp1.16b
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ rev64 vLen.16b,vLen.16b /* Len */
+ aes_encrypt_round OrigIV,Key8
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ pmull vTmp0.1q,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round OrigIV,Key9
+ rbit vAadHash.16b,vLen.16b /* Len */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr,(HASHKEY_TOTAL_NUM-1)*32]
+ aes_encrypt_round OrigIV,Key10
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aes_encrypt_round OrigIV,Key11
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round OrigIV,Key12
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d
+ aese vOrigIV.16b,vKey13.16b
+ eor vHigh.16b,vHigh.16b,vTmp0.16b
+ eor vLow.16b ,vLow.16b ,vTmp1.16b
+ pmull2 vTmp2.1q ,vAadHash.2d,vHashKey0Ext.2d
+ pmull vTmp3.1q ,vAadHash.1d,vHashKey0Ext.1d
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b
+ eor vOrigIV.16b,vOrigIV.16b,vKey14.16b
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b
+ rbit vAadHash.16b,vOrigIV.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+
+END_FUNC(enc,KEY_LEN,_finalize_)
+END_FUNC(dec,KEY_LEN,_finalize_)
+
+
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_init.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_init.S
new file mode 100644
index 000000000..0dd94c6b7
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_aes_init.S
@@ -0,0 +1,161 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "gcm_common.S"
+/*
+void gist_aes_gcm_init_##mode(
+ const struct gcm_key_data *key_data,
+ struct gcm_context_data *context,
+ uint8_t *iv,
+ uint8_t const *aad,
+ uint64_t aad_len
+ );
+*/
+ key_data .req x0
+ context .req x1
+ iv .req x2
+ aad .req x3
+ aad_len .req x4
+ temp0 .req x7
+ wtemp0 .req w7
+ temp1 .req x6
+ left_len .req x5
+ aad_left .req x2
+ small_tbl_adr .req x6
+
+ hashkey_base .req x0
+ hashkey_addr .req x2
+
+ declare_var_vector_reg AadHash,0
+ declare_var_vector_reg Dat0,1
+ declare_var_vector_reg HashKey0,2
+ declare_var_vector_reg HashKey0Ext,3
+ declare_var_vector_reg High,4
+ declare_var_vector_reg Middle0,5
+ declare_var_vector_reg Low,6
+ declare_var_vector_reg LeftDat,7
+ declare_var_vector_reg Zero,16
+ declare_var_vector_reg Poly,17
+
+ declare_var_vector_reg Tmp0,18
+ declare_var_vector_reg Tmp1,19
+ declare_var_vector_reg Ctr,1
+
+
+START_FUNC(init,128,_)
+START_FUNC(init,192,_)
+START_FUNC(init,256,_)
+ stp aad_len,xzr,[context,AAD_LEN_OFF] //save in_length and aad_length
+ str xzr,[context,PARTIAL_BLOCK_LENGTH_OFF] //clear partial_block_length
+ add hashkey_base,key_data,HASHKEY_BASE_OFF
+ /* Init Consts for ghash */
+ movi vZero.4s,0
+ mov temp0,0x87
+ dup vPoly.2d,temp0
+ /* Set orig_IV */
+ ldr wtemp0,[iv,8]
+ ldr temp1,[iv]
+ movk temp0,0x100,lsl 48
+ stp temp1,temp0,[context,ORIG_IV_OFF]
+ and left_len,aad_len,15
+ ldp qHashKey0,qHashKey0Ext,[key_data,(HASHKEY_TOTAL_NUM-1)*32]
+ /* Set current_counter, save as cpu order */
+ ldr qCtr,[context,ORIG_IV_OFF]
+ rev32 vCtr.16b,vCtr.16b
+ str qCtr,[context,CTR_OFF]
+ cbz aad_len,init_zero_exit
+ lsr aad_len,aad_len,4
+ /* Read small data */
+ cbz left_len,2f
+ add aad_left,aad,aad_len,lsl 4
+ read_small_data_start LeftDat,aad_left,left_len,small_tbl_adr,Tmp0
+ cbz aad_len,24f // aad_len less than 16
+2:
+ cbnz left_len,1f
+ /*left_len == 0 && aad_len !=0 */
+
+ sub aad_len,aad_len,1
+ /* leftDat = aad[-1] */
+ ldr qLeftDat,[aad,aad_len,lsl 4]
+ cbz aad_len,24f /* aad_len == 16 */
+1:
+ /* aad_len > 16 */
+ ldr qAadHash,[aad],16
+ rbit vAadHash.16b,vAadHash.16b
+ sub aad_len,aad_len,1
+1:
+ /* loop ghash_block */
+ cmp aad_len,HASHKEY_TOTAL_NUM - 1
+ bls 1f /* break loop */
+ sub aad_len,aad_len,HASHKEY_TOTAL_NUM
+ ghash_block_n HASHKEY_TOTAL_NUM,AadHash,Dat0,aad,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0,Tmp1
+ b 1b /* back to loop start */
+1:
+ cbz aad_len,23f /* left aad_len == 0 */
+ mov temp0,HASHKEY_TOTAL_NUM - 1
+ sub temp0,temp0,aad_len
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+ sub aad_len,aad_len,1
+
+
+ ghash_mult_init_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Dat0,2 /* load next hash */
+1:
+ cbz aad_len,1f
+ ghash_mult_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Tmp1,Dat0, 2
+
+ sub aad_len,aad_len,1
+ b 1b
+1:
+ ghash_mult_round_noload AadHash,HashKey0,HashKey0Ext,High,Low,Middle0,Tmp0,Tmp1
+ rbit vAadHash.16b, vLeftDat.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+ str qAadHash,[context]
+ ret
+
+23:
+ ghash_block_reg AadHash,LeftDat, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0
+ str qAadHash,[context]
+ ret
+24: /* aad_len is less than or equal to 16 */
+ rbit vLeftDat.16b, vLeftDat.16b
+ str qLeftDat,[context]
+ ret
+init_zero_exit:
+ stp xzr,xzr,[context]
+ ret
+END_FUNC(init,128,_)
+END_FUNC(init,192,_)
+END_FUNC(init,256,_)
+
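Both this init code and the finalize code load the constant 0x87 into vPoly. That value is the low byte of the GCM reduction polynomial x^128 + x^7 + x^2 + x + 1 and drives the fold-back of carry-less-multiply results into 128 bits; the rbit instructions appear because the NEON code keeps its operands bit-reflected. The following plain-C sketch shows the same reduction idea for a single multiply-by-x step, in non-reflected bit order; it is illustrative only and not part of the patch.

    #include <stdint.h>

    /* Multiply a GF(2^128) element by x modulo x^128 + x^7 + x^2 + x + 1.
     * v[0] holds bits 0..63 and v[1] bits 64..127, in plain (non-reflected) order.
     * The 0x87 folded back below is the same constant the assembly broadcasts into vPoly. */
    static inline void gf128_mul_x(uint64_t v[2])
    {
            uint64_t carry = v[1] >> 63;                /* coefficient of x^127 before shifting */
            v[1] = (v[1] << 1) | (v[0] >> 63);
            v[0] = (v[0] << 1) ^ (carry ? 0x87 : 0);    /* x^128 == x^7 + x^2 + x + 1 */
    }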
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_consts.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_consts.S
new file mode 100644
index 000000000..c4e8ef59c
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_consts.S
@@ -0,0 +1,140 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a
+ .section .rodata
+#define CONST_VAR_START(a) \
+ .align 3;.global a;.type a, %object;a
+
+#define CONST_VAR_END(a) \
+ .size a,. - a
+CONST_VAR_START(shift_small_data_table):
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+CONST_VAR_START(read_small_data_table):
+ .byte 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
+CONST_VAR_END(shift_small_data_table)
+ .byte 0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x0c,0x0d,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x0c,0x0d,0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x08,0x09,0x0a,0x0b,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x08,0x09,0x0a,0x0b,0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x08,0x09,0x0a,0x0b,0x0c,0x0d,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0c,0x0d,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0c,0x0d,0x0e,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0e,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0xff,0xff
+CONST_VAR_START(write_small_data_table):
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0xff
+CONST_VAR_END(read_small_data_table)
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0xff,0xff,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0xff,0xff,0x04,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0xff,0xff,0xff,0xff,0x08,0x09,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0xff,0xff,0xff,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0xff,0xff,0x0c,0xff
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0xff,0xff
+CONST_VAR_START(read_end_small_data_table):
+ .byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0xff
+CONST_VAR_END(write_small_data_table)
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0e
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0c,0x0d
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0c,0x0d,0x0e
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b,0x0e
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b,0x0c,0x0d
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0e
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0c,0x0d
+ .byte 0xff,0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x0c,0x0d,0x0e
+ .byte 0xff,0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b
+ .byte 0xff,0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0e
+ .byte 0xff,0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d
+CONST_VAR_START(write_end_small_data_table):
+ .byte 0xff,0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e
+CONST_VAR_END(read_end_small_data_table)
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0f,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0e,0x0f,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0d,0x0e,0x0f,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0c,0x0d,0x0e,0x0f,0xff,0xff,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0b,0x0c,0x0d,0x0e,0xff,0xff,0x0f,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff,0xff
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff
+ .byte 0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff
+ .byte 0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0xff,0xff,0xff,0xff,0xff,0xff,0x0f,0xff
+ .byte 0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0xff,0xff,0xff,0xff,0x0e,0x0f,0xff,0xff
+ .byte 0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0xff,0xff,0xff,0xff,0x0d,0x0e,0x0f,0xff
+ .byte 0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff,0xff,0xff,0xff
+ .byte 0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0xff,0xff,0x0f,0xff
+ .byte 0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff,0xff
+CONST_VAR_START(tbx_end_small_data_table):
+ .byte 0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f,0xff
+CONST_VAR_END(write_end_small_data_table)
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+CONST_VAR_START(tbx_start_small_data_table):
+ .byte 0xff,0xff,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+CONST_VAR_END(tbx_end_small_data_table)
+ .byte 0xff,0xff,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0b,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0c,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0d,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0e,0x0f
+ .byte 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x0f
+CONST_VAR_END(tbx_start_small_data_table)
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_128.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_128.S
new file mode 100644
index 000000000..9f1ff80fb
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_128.S
@@ -0,0 +1,30 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_128.S"
+#include "gcm_enc_dec.S"
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_256.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_256.S
new file mode 100644
index 000000000..f3cc2b802
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_enc_dec_256.S
@@ -0,0 +1,30 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_256.S"
+#include "gcm_enc_dec.S"
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_128.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_128.S
new file mode 100644
index 000000000..e635d7e70
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_128.S
@@ -0,0 +1,30 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_128.S"
+#include "gcm_precomp.S" \ No newline at end of file
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_256.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_256.S
new file mode 100644
index 000000000..52b76a6a2
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_precomp_256.S
@@ -0,0 +1,30 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_256.S"
+#include "gcm_precomp.S" \ No newline at end of file
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_128.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_128.S
new file mode 100644
index 000000000..42c48d9a0
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_128.S
@@ -0,0 +1,32 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_128.S"
+#include "gcm_update.S"
+
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_256.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_256.S
new file mode 100644
index 000000000..1c2c33b48
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/aes_gcm_update_256.S
@@ -0,0 +1,32 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include "gcm_common_256.S"
+#include "gcm_update.S"
+
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_aarch64_dispatcher.c b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_aarch64_dispatcher.c
new file mode 100644
index 000000000..1a2077356
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_aarch64_dispatcher.c
@@ -0,0 +1,108 @@
+/**********************************************************************
+ Copyright(c) 2020-2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include <aarch64_multibinary.h>
+
+#undef PROVIDER_BASIC
+#define PROVIDER_BASIC(a) (void*)0
+
+static unsigned long is_crypto_available(void)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ return (auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES);
+}
+
+#define DEFINE_CBC_INTERFACE_DISPATCHER(func,mode,suffix) \
+ DEFINE_INTERFACE_DISPATCHER(aes_cbc_##func##_##mode) \
+ { \
+ if (is_crypto_available()) \
+ return PROVIDER_INFO(aes_cbc_##func##_##mode##_##suffix); \
+ return PROVIDER_BASIC(aes_cbc_##func##_##mode); \
+ }
+
+DEFINE_CBC_INTERFACE_DISPATCHER(enc, 128, aes);
+DEFINE_CBC_INTERFACE_DISPATCHER(enc, 192, aes);
+DEFINE_CBC_INTERFACE_DISPATCHER(enc, 256, aes);
+
+/*
+ * AES-CBC decryption can be parallelised across blocks: each ciphertext
+ * block is decrypted independently, and the result is then EORed with the
+ * previous ciphertext block (or with the IV for the first block) to
+ * produce the plaintext.
+ *
+ * The unroll factor depends on the micro-architecture. The factors for
+ * Neoverse N1, Cortex-A57 and Cortex-A72 are based on the optimization
+ * guides and test results; other platforms use the ThunderX2 test results.
+ */
+DEFINE_INTERFACE_DISPATCHER(aes_cbc_dec_128)
+{
+ if (is_crypto_available()) {
+ switch (get_micro_arch_id()) {
+ case MICRO_ARCH_ID(ARM, NEOVERSE_N1):
+ return PROVIDER_INFO(aes_cbc_dec_128_aes_1);
+ case MICRO_ARCH_ID(ARM, CORTEX_A57):
+ return PROVIDER_INFO(aes_cbc_dec_128_aes_4);
+ case MICRO_ARCH_ID(ARM, CORTEX_A72):
+ return PROVIDER_INFO(aes_cbc_dec_128_aes_6);
+ }
+ return PROVIDER_INFO(aes_cbc_dec_128_aes_5);
+ }
+ return PROVIDER_BASIC(aes_cbc_dec_128);
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_cbc_dec_192)
+{
+ if (is_crypto_available()) {
+ switch (get_micro_arch_id()) {
+ case MICRO_ARCH_ID(ARM, NEOVERSE_N1):
+ return PROVIDER_INFO(aes_cbc_dec_192_aes_1);
+ case MICRO_ARCH_ID(ARM, CORTEX_A57):
+ return PROVIDER_INFO(aes_cbc_dec_192_aes_5);
+ case MICRO_ARCH_ID(ARM, CORTEX_A72):
+ return PROVIDER_INFO(aes_cbc_dec_192_aes_4);
+ }
+ return PROVIDER_INFO(aes_cbc_dec_192_aes_5);
+ }
+ return PROVIDER_BASIC(aes_cbc_dec_192);
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_cbc_dec_256)
+{
+ if (is_crypto_available()) {
+ switch (get_micro_arch_id()) {
+ case MICRO_ARCH_ID(ARM, NEOVERSE_N1):
+ return PROVIDER_INFO(aes_cbc_dec_256_aes_1);
+ case MICRO_ARCH_ID(ARM, CORTEX_A57):
+ return PROVIDER_INFO(aes_cbc_dec_256_aes_5);
+ case MICRO_ARCH_ID(ARM, CORTEX_A72):
+ return PROVIDER_INFO(aes_cbc_dec_256_aes_6);
+ }
+ return PROVIDER_INFO(aes_cbc_dec_256_aes_5);
+ }
+ return PROVIDER_BASIC(aes_cbc_dec_256);
+}
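The dispatcher comment above points out that CBC decryption, unlike encryption, parallelises across blocks: every plaintext block depends only on the key and on ciphertext that is already available. The scalar C sketch below shows that data flow, which the unrolled variants in cbc_dec_aes.S vectorise; aes_ecb_dec_block is a hypothetical single-block helper used only for illustration (not an ISA-L symbol), and the sketch assumes pt and ct do not overlap.

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical single-block AES decryption, standing in for the aesd/aesimc chain. */
    extern void aes_ecb_dec_block(const uint8_t *round_keys, const uint8_t in[16], uint8_t out[16]);

    static void cbc_dec_reference(const uint8_t *round_keys, const uint8_t iv[16],
                                  const uint8_t *ct, uint8_t *pt, size_t nblocks)
    {
            /* Each iteration is independent: decrypt block i, then EOR it with
             * ciphertext block i-1 (or the IV for block 0). That independence is
             * what lets the assembly keep several AES pipelines busy at once. */
            for (size_t i = 0; i < nblocks; i++) {
                    const uint8_t *prev = (i == 0) ? iv : ct + (i - 1) * 16;
                    uint8_t tmp[16];

                    aes_ecb_dec_block(round_keys, ct + i * 16, tmp);
                    for (int j = 0; j < 16; j++)
                            pt[i * 16 + j] = tmp[j] ^ prev[j];
            }
    }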
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_common.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_common.S
new file mode 100644
index 000000000..6f793843a
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_common.S
@@ -0,0 +1,54 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#define FN_NAME(fn,mode,post) aes_cbc_##fn##_##mode##_##post
+#define LABEL_NAME(fn,mode,post) .L##fn##_##mode##_##post
+#define START_FUNC(fn,mode,post) .global FN_NAME(fn,mode,post); \
+ .type FN_NAME(fn,mode,post), %function; \
+ FN_NAME(fn,mode,post):
+#define END_FUNC(fn,mode,post) .size FN_NAME(fn,mode,post), .-FN_NAME(fn,mode,post)
+.macro declare_var_vector_reg name:req,reg:req
+.ifdef q\name
+ .unreq q\name
+ .unreq v\name
+ .unreq s\name
+ .unreq d\name
+.endif
+ .set q\name , \reg
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+ d\name .req d\reg
+.endm
+
+.macro declare_var_generic_reg name:req,reg:req
+ \name .req x\reg
+ x\name .req x\reg
+ w\name .req w\reg
+.endm
\ No newline at end of file
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_dec_aes.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_dec_aes.S
new file mode 100644
index 000000000..11bd90a71
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_dec_aes.S
@@ -0,0 +1,482 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+#include "cbc_common.S"
+ .altmacro
+.macro _aes_decrypt_round block:req,key:req
+ aesd v\block\().16b,vKey\key\().16b
+ .if \key < 13
+ aesimc v\block\().16b,v\block\().16b
+ .endif
+ .if \key > 13
+		.error	"aes_decrypt_round: key index out of range"
+ .endif
+.endm
+
+.macro aes_decrypt_round block,reg,key
+ _aes_decrypt_round In\reg\()_\block,\key
+.endm
+
+.macro load_keys first_key
+ .if \first_key == 4
+ ld1 {vKey4.4s -vKey6.4s},[keys],3*16
+ .endif
+ .ifc 2 , \first_key
+ ldr qKey2,[keys],1*16
+ ld1 {vKey3.16b -vKey6.16b},[keys],4*16
+ .endif
+ .ifc 0 , \first_key
+ ld1 {vKey0.16b -vKey2.16b},[keys],3*16
+ ld1 {vKey3.16b -vKey6.16b},[keys],4*16
+ .endif
+ ld1 {vKey7.16b -vKey10.16b},[keys],4*16
+ ld1 {vKey11.16b-vKey14.16b},[keys],4*16
+.endm
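+/*
+ * Note: the expanded decrypt key schedule is stored as 15 16-byte round keys.
+ * AES-128/192/256 use 11/13/15 of them, so first_key is 4, 2 or 0 and the
+ * loads above always end at vKey14, the final round key.
+ */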
+
+.macro aes_decrypt_blocks_round blocks,key_idx,key_reg,next_keyreg,first_idx
+ .if \key_idx == 12
+ ldr q\next_keyreg,[keys],(\first_idx-13)*16
+ .else
+ ldr q\next_keyreg,[keys],16
+ .endif
+ n=0
+ .rept \blocks
+ _aes_decrypt_round %n,\key_reg
+ n=n+1
+ .endr
+.endm
+
+.macro aes_decrypt_rounds blocks,key_st,key_end,first_idx
+ j=key_st
+ .rept \key_end - \key_st + 1
+ aes_decrypt_blocks_round \blocks,%j,%(j%2),%((j+1)%2),\first_idx
+ j=j+1
+ .endr
+.endm
+
+.macro aes_cbc_decrypt_rounds blocks,first_idx,reg,next_reg
+ aes_decrypt_rounds \blocks,\first_idx,12,\first_idx
+.endm
+
+.macro declare_prefix idx,reg,prefix
+ declare_var_vector_reg \prefix\()\idx,\reg
+.endm
+
+.macro mldr reg,block,addr
+ ldr qIn\reg\()_\block,[\addr],16
+.endm
+
+.macro mldrin reg,blocks,addr
+ .if \blocks == 1
+ ldr qIn\reg\()_0,[\addr],16
+ .exitm
+ .endif
+ .if \blocks == 2
+ ldp qIn\reg\()_0,qIn\reg\()_1,[\addr],2*16
+ .exitm
+ .endif
+ .if \blocks == 3
+ ldr qIn\reg\()_0,[\addr],16
+ ldp qIn\reg\()_1,qIn\reg\()_2,[\addr],2*16
+ .exitm
+ .endif
+ .if \blocks == 4
+ ld1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 5
+ ldr qIn\reg\()_0,[\addr],16
+ ld1 {vIn\reg\()_1.16b-vIn\reg\()_4.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 6
+ ldp qIn\reg\()_0,qIn\reg\()_1,[\addr],2*16
+ ld1 {vIn\reg\()_2.16b-vIn\reg\()_5.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 7
+ ld1 {vIn\reg\()_0.16b-vIn\reg\()_2.16b},[\addr],3*16
+ ld1 {vIn\reg\()_3.16b-vIn\reg\()_6.16b},[\addr],4*16
+ .exitm
+ .endif
+
+ .if \blocks == 8
+ ld1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ ld1 {vIn\reg\()_4.16b-vIn\reg\()_7.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 9
+ ld1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ ld1 {vIn\reg\()_4.16b-vIn\reg\()_7.16b},[\addr],4*16
+ ldr qIn\reg\()_8,[\addr],16
+ .exitm
+ .endif
+.endm
+
+.macro mstrout reg,blocks,addr
+ .if \blocks == 1
+ str qIn\reg\()_0,[\addr],16
+ .exitm
+ .endif
+ .if \blocks == 2
+ stp qIn\reg\()_0,qIn\reg\()_1,[\addr],2*16
+ .exitm
+ .endif
+ .if \blocks == 3
+ str qIn\reg\()_0,[\addr],16
+ stp qIn\reg\()_1,qIn\reg\()_2,[\addr],2*16
+ .exitm
+ .endif
+ .if \blocks == 4
+ st1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 5
+ str qIn\reg\()_0,[\addr],16
+ st1 {vIn\reg\()_1.16b-vIn\reg\()_4.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 6
+ stp qIn\reg\()_0,qIn\reg\()_1,[\addr],2*16
+ st1 {vIn\reg\()_2.16b-vIn\reg\()_5.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 7
+ st1 {vIn\reg\()_0.16b-vIn\reg\()_2.16b},[\addr],3*16
+ st1 {vIn\reg\()_3.16b-vIn\reg\()_6.16b},[\addr],4*16
+ .exitm
+ .endif
+
+ .if \blocks == 8
+ st1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ st1 {vIn\reg\()_4.16b-vIn\reg\()_7.16b},[\addr],4*16
+ .exitm
+ .endif
+ .if \blocks == 9
+ st1 {vIn\reg\()_0.16b-vIn\reg\()_3.16b},[\addr],4*16
+ st1 {vIn\reg\()_4.16b-vIn\reg\()_7.16b},[\addr],4*16
+ str qIn\reg\()_8,[\addr],16
+ .exitm
+ .endif
+.endm
+
+.macro eorkey14 block,reg
+ eor vBlock\block\().16b,vKey14.16b,vState\reg\()_\block\().16b
+.endm
+
+.macro eorblock block,reg
+ eor vIn\reg\()_\block\().16b,vBlock\block\().16b,vIn\reg\()_\block\().16b
+.endm
+
+.macro movstate0 block,reg
+ mov vState\reg\()_0.16b,vIn\reg\()_\block\().16b
+.endm
+
+.macro cbc_decrypt_rounds blocks,reg,first_key,cur_blocks
+ .ifb \cur_blocks
+ _blocks=\blocks
+ .else
+ _blocks=\cur_blocks
+ .endif
+ key=\first_key + 1
+ .if 3*\blocks+1 >= 32-15+\first_key
+ ldr_key %key,\first_key
+ .endif
+ n=0
+ .rept _blocks - 1
+ eorkey14 %((n+1)%_blocks),\reg
+ aes_decrypt_round %n,\reg,\first_key
+ n=n+1
+ .endr
+ eorkey14 0,\reg
+ movstate0 %(_blocks-1),\reg
+ aes_decrypt_round %n,\reg,\first_key
+
+ k=0
+ .rept 15-\first_key-3
+ n=0
+ .if 3*\blocks+1 >= 32-15+\first_key
+ ldr_key %(key+k+1),\first_key
+ .endif
+
+ .rept _blocks
+ aes_decrypt_round %n,\reg,%(key+k)
+ n=n+1
+ .endr
+ k=k+1
+ .endr
+ n=0
+ .if 3*\blocks+1 >= 32-15+\first_key
+ ldr_key \first_key,\first_key
+ .endif
+ .rept _blocks
+ aes_decrypt_round %n,\reg,13
+ eorblock %n,\reg
+ n=n+1
+ .endr
+.endm
+
+.macro print_macro a,b,c,d,e
+ .print "print_macro,\a \b \c \d \e"
+.endm
+
+.macro remainder_process blocks,first_key,curblk
+.if \blocks > (1<<\curblk)
+ tbz xlen_remainder,\curblk,1f
+ mldrin 0,%(1<<\curblk),in
+ cbc_decrypt_rounds \blocks,0,\first_key,%(1<<\curblk)
+ mstrout 0,%(1<<\curblk),out
+1:
+.endif
+.endm
+
+.macro aes_cbc_decrypt_blocks first_key,blocks
+ division \blocks, len_bytes,len_remainder,tmp0,tmp1
+ mov xlen_quotient_in,xlen_quotient
+ /*
+ input regs(2*\block) + tmp regs(\blocks) + State reg(1)
+ + key regs(15-\first_key) < 32
+ */
+ .if 3*\blocks+1 < 32-15+\first_key
+ n=\first_key
+ .rept 15-\first_key
+ declare_prefix %n,%(n+17),Key
+ n=n+1
+ .endr
+ load_keys \first_key
+ .else
+ n=\first_key
+ .rept 14-\first_key
+ declare_prefix %n,%((n%2)+29),Key
+ n=n+1
+ .endr
+ declare_prefix 14,31,Key
+ /* load first key */
+ ldr_key \first_key,\first_key
+ /* load last key */
+ ldr_key 14,\first_key
+ .endif
+ m=\blocks
+ l=\blocks-1
+ declare_prefix 0,0,State0_
+ declare_prefix 0,0,State1_
+ n=0
+ .rept \blocks
+ declare_prefix %n,%(n+1),In0_
+ declare_prefix %n,%(n+m+1),In1_
+ declare_prefix %n,%(n+2*m+1),Block
+ n=n+1
+ .endr
+ n=1
+ .rept \blocks -1
+ declare_prefix %n,%(n),State0_
+ declare_prefix %n,%(n+m),State1_
+ n=n+1
+ .endr
+ ldr qState0_0,[IV]
+ cbz xlen_quotient,9f
+ mldrin 0,\blocks,in
+ sub xlen_quotient_in,xlen_quotient_in,1
+ b 5f
+
+3:
+ sub xlen_quotient,xlen_quotient,1
+ mstrout 1,\blocks,out
+ cbz xlen_quotient,9f
+5:
+ cbz xlen_quotient_in,1f
+ mldrin 1,\blocks,in
+ sub xlen_quotient_in,xlen_quotient_in,1
+1:
+ cbc_decrypt_rounds \blocks,0,\first_key
+ sub xlen_quotient,xlen_quotient,1
+ mstrout 0,\blocks,out
+ cbz xlen_quotient,9f
+
+ cbz xlen_quotient_in,1f
+ mldrin 0,\blocks,in
+ sub xlen_quotient_in,xlen_quotient_in,1
+1:
+ cbc_decrypt_rounds \blocks,1,\first_key
+ b 3b
+9:
+ remainder_process \blocks,\first_key,3
+ remainder_process \blocks,\first_key,2
+ remainder_process \blocks,\first_key,1
+ remainder_process \blocks,\first_key,0
+.endm
+
+
+.macro division blocks,quotient,remainder,tmp0,tmp1
+ .if \blocks == 1
+ mov x\remainder, 0
+ .exitm
+ .endif
+ .if \blocks == 2
+ and x\remainder, x\quotient, 1
+ lsr x\quotient, x\quotient, 1
+ .exitm
+ .endif
+ .if \blocks == 3
+ mov x\tmp0, -6148914691236517206
+ mov x\remainder, x\quotient
+ movk x\tmp0, 0xaaab, lsl 0
+ umulh x\tmp0, x\quotient, x\tmp0
+ and x\tmp1, x\tmp0, -2
+ lsr x\quotient, x\tmp0, 1
+ add x\tmp1, x\tmp1, x\quotient
+ sub x\remainder, x\remainder, x\tmp1
+ .exitm
+ .endif
+ .if \blocks == 4
+ and x\remainder, x\quotient, 3
+ lsr x\quotient, x\quotient, 2
+ .exitm
+ .endif
+ .if \blocks == 5
+ mov x\tmp0, -3689348814741910324
+ mov x\remainder, x\quotient
+ movk x\tmp0, 0xcccd, lsl 0
+ umulh x\tmp0, x\quotient, x\tmp0
+ and x\tmp1, x\tmp0, -4
+ lsr x\quotient, x\tmp0, 2
+ add x\tmp1, x\tmp1, x\quotient
+ sub x\remainder, x\remainder, x\tmp1
+ .exitm
+ .endif
+ .if \blocks == 6
+ mov x\tmp0, -6148914691236517206
+ mov x\tmp1, x\quotient
+ movk x\tmp0, 0xaaab, lsl 0
+ umulh x\tmp0, x\quotient, x\tmp0
+ lsr x\quotient, x\tmp0, 2
+ add x\remainder, x\quotient, x\quotient, lsl 1
+ sub x\remainder, x\tmp1, x\remainder, lsl 1
+ .exitm
+ .endif
+ .if \blocks == 7
+ mov x\tmp0, 9363
+ mov x\tmp1, x\quotient
+ movk x\tmp0, 0x9249, lsl 16
+ movk x\tmp0, 0x4924, lsl 32
+ movk x\tmp0, 0x2492, lsl 48
+ umulh x\quotient, x\quotient, x\tmp0
+ sub x\tmp0, x\tmp1, x\quotient
+ add x\tmp0, x\quotient, x\tmp0, lsr 1
+ lsr x\quotient, x\tmp0, 2
+ lsl x\remainder, x\quotient, 3
+ sub x\remainder, x\remainder, x\quotient
+ sub x\remainder, x\tmp1, x\remainder
+ .exitm
+ .endif
+ .if \blocks == 8
+ and x\remainder, x\quotient, 7
+ lsr x\quotient, x\quotient, 3
+ .exitm
+ .endif
+ .if \blocks == 9
+ mov x\tmp0, 58255
+ mov x\remainder, x\quotient
+ movk x\tmp0, 0x8e38, lsl 16
+ movk x\tmp0, 0x38e3, lsl 32
+ movk x\tmp0, 0xe38e, lsl 48
+ umulh x\tmp0, x\quotient, x\tmp0
+ and x\tmp1, x\tmp0, -8
+ lsr x\quotient, x\tmp0, 3
+ add x\tmp1, x\tmp1, x\quotient
+ sub x\remainder, x\remainder, x\tmp1
+ .exitm
+ .endif
+.endm
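+/*
+ * The non-power-of-two cases above replace udiv with a multiply by a fixed-point
+ * reciprocal (umulh). A C sketch of the blocks==3 case, for illustration only:
+ *
+ *	uint64_t div3(uint64_t x, uint64_t *rem)
+ *	{
+ *		// 0xAAAAAAAAAAAAAAAB == ceil(2^65 / 3)
+ *		uint64_t hi = (uint64_t)(((unsigned __int128)x * 0xAAAAAAAAAAAAAAABULL) >> 64);
+ *		uint64_t q  = hi >> 1;		// lsr x\quotient,x\tmp0,1
+ *		*rem = x - 3 * q;		// (hi & ~1ULL) + q == 3*q
+ *		return q;
+ *	}
+ */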
+
+.macro ldr_key num,first_key
+ ldr qKey\num,[keys,16*(\num - \first_key)]
+.endm
+#ifndef CBC_DECRYPT_BLOCKS_NUM
+#define CBC_DECRYPT_BLOCKS_NUM 8
+#endif
+
+.macro cbc_decrypt first_key:req,blocks
+ lsr xlen_bytes,xlen_bytes,4
+ cbz xlen_bytes,10f
+ push_stack
+ aes_cbc_decrypt_blocks \first_key,\blocks
+ pop_stack
+10:
+.endm
+
+.set stack_size,64
+.macro push_stack
+ stp d8, d9,[sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ stp d14,d15,[sp,48]
+.endm
+
+.macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d14,d15,[sp,48]
+ ldp d8, d9, [sp], stack_size
+.endm
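+/* d8-d15 are callee-saved under the AAPCS64 calling convention, hence the
+   save/restore above around the wide vector register usage. */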
+
+/*
+void aes_cbc_dec_128(
+ void *in, //!< Input cipher text
+ uint8_t *IV, //!< Must be 16 bytes aligned to a 16 byte boundary
+ uint8_t *keys, //!< Must be on a 16 byte boundary and length of key size * key rounds or dec_keys of cbc_key_data
+ void *out, //!< Output plain text
+ uint64_t len_bytes //!< Must be a multiple of 16 bytes
+ );
+*/
+ declare_var_generic_reg in ,0
+ declare_var_generic_reg IV ,1
+ declare_var_generic_reg keys ,2
+ declare_var_generic_reg out ,3
+ declare_var_generic_reg len_bytes ,4
+ declare_var_generic_reg len_quotient,4
+ declare_var_generic_reg len_remainder,5
+ declare_var_generic_reg tmp0 ,6
+ declare_var_generic_reg tmp1 ,7
+ declare_var_generic_reg len_quotient_in,6
+
+.macro define_aes_cbc_dec_func mode:req,blocks:req
+ .global aes_cbc_dec_\mode\()_aes_\blocks
+aes_cbc_dec_\mode\()_aes_\blocks:
+ cbc_decrypt %((256-mode)/32),\blocks
+ ret
+ .size aes_cbc_dec_\mode\()_aes_\blocks, . - aes_cbc_dec_\mode\()_aes_\blocks
+.endm
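+/*
+ * Example (illustrative): "define_aes_cbc_dec_func 128,4" emits the symbol
+ * aes_cbc_dec_128_aes_4, a 4-blocks-per-iteration decryptor, and passes
+ * first_key = (256-128)/32 = 4 so only round keys 4..14 are used.
+ */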
+
+.irp blocks,1,2,3,4,5,6,7,8,9
+ define_aes_cbc_dec_func 128,\blocks
+ define_aes_cbc_dec_func 192,\blocks
+ define_aes_cbc_dec_func 256,\blocks
+.endr
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_enc_aes.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_enc_aes.S
new file mode 100644
index 000000000..8eb5e507d
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_enc_aes.S
@@ -0,0 +1,157 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "cbc_common.S"
+
+ declare_var_vector_reg Key0 ,17
+ declare_var_vector_reg Key1 ,18
+ declare_var_vector_reg Key2 ,19
+ declare_var_vector_reg Key3 ,20
+ declare_var_vector_reg Key4 ,21
+ declare_var_vector_reg Key5 ,22
+ declare_var_vector_reg Key6 ,23
+ declare_var_vector_reg Key7 ,24
+ declare_var_vector_reg Key8 ,25
+ declare_var_vector_reg Key9 ,26
+ declare_var_vector_reg Key10 ,27
+ declare_var_vector_reg Key11 ,28
+ declare_var_vector_reg Key12 ,29
+ declare_var_vector_reg Key13 ,30
+ declare_var_vector_reg Key14 ,31
+
+.macro aes_encrypt_round block,key
+ aese v\block\().16b,vKey\key\().16b
+ .if \key < 13
+ aesmc v\block\().16b,v\block\().16b
+ .endif
+.endm
+
+.macro aes_encrypt_round_name block,key
+ aese v\block\().16b,v\key\().16b
+ aesmc v\block\().16b,v\block\().16b
+.endm
+
+
+
+.set stack_size,64
+.macro push_stack
+ stp d8, d9,[sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ stp d14,d15,[sp,48]
+.endm
+
+.macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d14,d15,[sp,48]
+ ldp d8, d9, [sp], stack_size
+.endm
+/*
+void aes_cbc_enc_128(
+	void	*in, //!< Input plain text
+	uint8_t	*IV, //!< Must be 16 bytes aligned to a 16 byte boundary
+	uint8_t	*keys, //!< Must be on a 16 byte boundary and length of key size * key rounds or enc_keys of cbc_key_data
+	void	*out, //!< Output cipher text
+ uint64_t len_bytes //!< Must be a multiple of 16 bytes
+ );
+*/
+ declare_var_generic_reg in ,0
+ declare_var_generic_reg IV ,1
+ declare_var_generic_reg keys ,2
+ declare_var_generic_reg out ,3
+ declare_var_generic_reg len_bytes ,4
+
+ declare_var_vector_reg State ,0
+ declare_var_vector_reg FirstKey ,1
+ declare_var_vector_reg Block ,2
+ declare_var_vector_reg ConstKey ,3
+.macro load_key num
+ ldr qKey\num,[keys],16
+.endm
+.altmacro
+.macro cbc_encrypt first:req
+ lsr xlen_bytes,xlen_bytes,4
+ cbz xlen_bytes,3f
+ ldr qState,[IV]
+ ldr qKey\first,[keys],16
+ .set lastkey_off,13-\first
+ ldr qKey14,[keys,lastkey_off*16]
+ ldr qBlock,[in],16
+ n=\first
+ second=1+\first
+ .rept 5-n
+ n=n+1
+ load_key %n
+ .endr
+ ld1 {vKey6.4s - vKey9.4s},[keys],4*16
+ eor vBlock.16b,vBlock.16b ,vState.16b
+ eor vConstKey.16b,vKey\first\().16b,vKey14.16b
+ aes_encrypt_round Block,\first
+ ld1 {vKey10.4s - vKey13.4s},[keys]
+ b 1f
+2:
+ aes_encrypt_round Block,\first
+ str qState,[out],16
+1:
+ sub xlen_bytes,xlen_bytes,1
+ aes_encrypt_round Block,%second
+ cbz xlen_bytes,1f
+ ldr qKey\first,[in],16
+1:
+ n=second
+ .rept 12-n
+ n=n+1
+ aes_encrypt_round Block,%n
+ .endr
+
+ eor vKey\first\().16b,vKey\first\().16b,vConstKey.16b
+ aes_encrypt_round Block,13
+ eor vState.16b,vBlock.16b,vKey14.16b
+ cbnz xlen_bytes,2b
+ str qState,[out]
+3:
+
+.endm
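+/*
+ * CBC encryption is inherently serial: each plaintext block must be XORed with
+ * the previous ciphertext block before it is encrypted, so only one block is in
+ * flight and the loop above instead overlaps the input loads and output stores
+ * with the aese/aesmc chain.
+ */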
+START_FUNC(enc,128,aes)
+ cbc_encrypt 4
+ ret
+END_FUNC(enc,128,aes)
+
+START_FUNC(enc,192,aes)
+ cbc_encrypt 2
+ ret
+END_FUNC(enc,192,aes)
+
+START_FUNC(enc,256,aes)
+ cbc_encrypt 0
+ ret
+END_FUNC(enc,256,aes)
\ No newline at end of file
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_multibinary_aarch64.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_multibinary_aarch64.S
new file mode 100644
index 000000000..fba533754
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/cbc_multibinary_aarch64.S
@@ -0,0 +1,38 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "aarch64_multibinary.h"
+
+mbin_interface aes_cbc_dec_128
+mbin_interface aes_cbc_dec_192
+mbin_interface aes_cbc_dec_256
+
+mbin_interface aes_cbc_enc_128
+mbin_interface aes_cbc_enc_192
+mbin_interface aes_cbc_enc_256
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_aarch64_dispatcher.c b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_aarch64_dispatcher.c
new file mode 100644
index 000000000..f8188e3ae
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_aarch64_dispatcher.c
@@ -0,0 +1,255 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include <aarch64_multibinary.h>
+
+#undef PROVIDER_BASIC
+#define PROVIDER_BASIC(a) (void*)0
+
+static unsigned long is_crypto_available(void)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ return (auxval & (HWCAP_ASIMD | HWCAP_AES | HWCAP_PMULL)) ==
+ (HWCAP_ASIMD | HWCAP_AES | HWCAP_PMULL);
+}
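+
+/*
+ * Each dispatcher below picks an implementation at load time: when the kernel
+ * reports ASIMD, AES and PMULL in HWCAP, the crypto-extension routines
+ * (PROVIDER_INFO(..._aes)) are returned, otherwise PROVIDER_BASIC, which is
+ * redefined above to a null pointer.
+ */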
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_128)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_128_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_128);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_128)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_128_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_128);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_precomp_128)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_precomp_128_aes);
+
+ return PROVIDER_BASIC(aes_gcm_precomp_128);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_256)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_256_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_256);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_256)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_256_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_256);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_precomp_256)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_precomp_256_aes);
+
+ return PROVIDER_BASIC(aes_gcm_precomp_256);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_128_update)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_128_update_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_128_update);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_128_finalize)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_128_finalize_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_128_finalize);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_128_update)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_128_update_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_128_update);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_128_finalize)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_128_finalize_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_128_finalize);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_256_update)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_256_update_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_256_update);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_256_finalize)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_256_finalize_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_256_finalize);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_256_update)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_256_update_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_256_update);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_256_finalize)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_256_finalize_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_256_finalize);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_init_256)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_init_256_aes);
+
+ return PROVIDER_BASIC(aes_gcm_init_256);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_init_128)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_init_128_aes);
+
+ return PROVIDER_BASIC(aes_gcm_init_128);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_128_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_128_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_128_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_128_update_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_128_update_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_128_update_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_128_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_128_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_128_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_128_update_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_128_update_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_128_update_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_256_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_256_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_256_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_enc_256_update_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_enc_256_update_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_enc_256_update_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_256_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_256_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_256_nt);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_gcm_dec_256_update_nt)
+{
+ if (is_crypto_available())
+ return PROVIDER_INFO(aes_gcm_dec_256_update_nt_aes);
+
+ return PROVIDER_BASIC(aes_gcm_dec_256_update_nt);
+
+}
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common.S
new file mode 100644
index 000000000..042f6cf19
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common.S
@@ -0,0 +1,430 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+#define HASHKEY_TOTAL_NUM (24)
+#define HASHKEY_BASE_OFF (15*16)
+#define HASHKEY_OFF(n) ((15*16)+n*32)
+#define HASHKEY_EXT_OFF(n) ((15*16)+n*32+16)
+#ifndef KEY_LEN
+#define KEY_LEN 128
+#endif
+#ifndef BLOCKS
+#define BLOCKS 24
+#endif
+#define FN_NAME(fn,mode,post) aes_gcm_##fn##_##mode####post##aes
+#define START_FUNC(fn,mode,post) .global FN_NAME(fn,mode,post); \
+ .type FN_NAME(fn,mode,post), %function; \
+ FN_NAME(fn,mode,post):
+#define END_FUNC(fn,mode,post) .size FN_NAME(fn,mode,post), .-FN_NAME(fn,mode,post)
+
+#define AAD_LEN_OFF 16
+#define IN_LENGTH_OFF 24
+#define PARTIAL_BLOCK_ENC_KEY_OFF 32
+#define PARTIAL_BLOCK_LENGTH_OFF 80
+#define CTR_OFF 64
+#define ORIG_IV_OFF 48
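+/*
+ * The offsets above imply a context layout roughly like the following (sketch
+ * only; the authoritative struct gcm_context_data lives in the public headers):
+ *
+ *	struct gcm_context_data {
+ *		uint8_t  aad_hash[16];			// offset  0
+ *		uint64_t aad_length;			// offset 16, AAD_LEN_OFF
+ *		uint64_t in_length;			// offset 24, IN_LENGTH_OFF
+ *		uint8_t  partial_block_enc_key[16];	// offset 32
+ *		uint8_t  orig_IV[16];			// offset 48, ORIG_IV_OFF
+ *		uint8_t  current_counter[16];		// offset 64, CTR_OFF
+ *		uint64_t partial_block_length;		// offset 80
+ *	};
+ */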
+/*
+ [low,middle,tmp0,high] +=dat0 * [hashkey0,hashkey0_ext]
+ ifnb dat1
+ dat1=rbit(*dat_adr)
+ [hashkey0,hashkey0_ext] = *hashkey_adr
+ dat_adr+=16
+ hashkey_adr+=32
+*/
+
+.macro ghash_mult_round aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req,high:req,low:req,middle:req, \
+ tmp0:req,tmp1:req,next_dat:req,left_count:req
+
+ ldr q\next_dat,[\dat_adr],16
+ pmull v\tmp0\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ pmull2 v\tmp1\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr,16]
+ .endif
+ eor v\middle\().16b,v\middle\().16b,v\tmp0\().16b
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ eor v\middle\().16b,v\middle\().16b,v\tmp1\().16b
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0\().1d
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],32
+ .endif
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ rbit v\aadhash\().16b, v\next_dat\().16b
+.endm
+
+.macro ghash_mult_init_round aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req, \
+ high:req,low:req,middle:req,tmp0:req,next_dat:req,left_count:req
+ ldp q\hashkey0,q\hashkey0_ext,[\hashkey_adr],32
+ ldr q\next_dat,[\dat_adr],16
+ pmull v\middle\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr,16]
+ .endif
+ pmull2 v\high\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ eor v\middle\().16b,v\middle\().16b,v\tmp0\().16b
+
+ pmull v\low\().1q,v\aadhash\().1d,v\hashkey0\().1d
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],32
+ .endif
+ rbit v\aadhash\().16b, v\next_dat\().16b
+.endm
+
+/* aadhash=reduction(low,middle,high)+dat0 */
+.macro ghash_mult_final_round aadhash:req, \
+ high:req,low:req,middle:req,tmp0:req, \
+ zero:req,poly:req
+
+ ext v\tmp0\().16b,v\middle\().16b,v\zero\().16b,8 /*high*/
+ ext v\middle\().16b,v\zero\().16b,v\middle\().16b,8 /*low */
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ eor v\low\().16b,v\low\().16b,v\middle\().16b
+
+ pmull2 v\middle\().1q,v\high\().2d,v\poly\().2d
+
+ ext v\tmp0\().16b,v\middle\().16b,v\zero\().16b,8 /*high*/
+ ext v\middle\().16b,v\zero\().16b,v\middle\().16b,8 /*low*/
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ eor v\low\().16b,v\low\().16b,v\middle\().16b
+ pmull v\middle\().1q,v\high\().1d,v\poly\().1d
+ eor v\tmp0\().16b, v\low\().16b, v\middle\().16b
+ eor v\aadhash\().16b, v\aadhash\().16b, v\tmp0\().16b
+.endm
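+/*
+ * The folding above reduces the 256-bit carry-less product [high:low] modulo
+ * the GHASH polynomial x^128 + x^7 + x^2 + x + 1. Field elements are kept
+ * bit-reflected (rbit) by the callers, and the poly register holds the constant
+ * 0x87 (x^7 + x^2 + x + 1); the high half is multiplied by it twice, once per
+ * 64-bit fold.
+ */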
+.macro ghash_reset_hashkey_addr hashkey_addr:req,hashkey_base:req,count:req
+ add \hashkey_addr,\hashkey_base,(24-\count)<<5
+.endm
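+/*
+ * The precomputed table holds HASHKEY_TOTAL_NUM (24) entries of 32 bytes each:
+ * a power of the hash key H together with a companion value ("_ext") used for
+ * the middle cross products. A batch of `count` blocks therefore starts at
+ * entry (24 - count) and walks forward, so the most recent block always meets
+ * the last table entry.
+ */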
+
+
+.macro ghash_block_n count:req,aadhash:req, dat:req,dat_addr:req, hashkey_addr:req, hashkey_base:req, \
+ hashkey:req,hashkey_ext:req,high:req,low:req,middle:req, zero:req,poly:req, \
+ tmp0:req,tmp1:req
+
+ ghash_reset_hashkey_addr \hashkey_addr,\hashkey_base,\count
+ ghash_mult_init_round \aadhash,\dat_addr,\hashkey_addr,\hashkey,\hashkey_ext, \
+ \high,\low,\middle,\tmp0,\dat,\count
+ .set left_count,\count - 1
+ .rept left_count
+ ghash_mult_round \aadhash,\dat_addr,\hashkey_addr,\hashkey,\hashkey_ext, \
+ \high,\low,\middle,\tmp0,\tmp1,\dat, left_count
+ .set left_count,left_count - 1
+
+ .endr
+ ghash_mult_final_round \aadhash,\high,\low,\middle,\tmp0,\zero,\poly
+.endm
+
+/*
+ aadhash=aadhash*[hashkey,hashkey_ext] + rbit(dat)
+*/
+.macro ghash_block_reg aadhash:req, dat:req, \
+ hashkey:req,hashkey_ext:req,high:req,low:req,middle:req, zero:req,poly:req, \
+ tmp0:req
+ pmull v\middle\().1q,v\aadhash\().1d,v\hashkey_ext\().1d
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey_ext\().2d
+ pmull2 v\high\().1q,v\aadhash\().2d,v\hashkey\().2d
+ eor v\middle\().16b,v\middle\().16b,v\tmp0\().16b
+ pmull v\low\().1q,v\aadhash\().1d,v\hashkey\().1d
+ rbit v\aadhash\().16b, v\dat\().16b
+ ghash_mult_final_round \aadhash,\high,\low,\middle,\tmp0,\zero,\poly
+.endm
+
+.macro ghash_mult_round_noload aadhash:req, \
+ hashkey0:req,hashkey0_ext:req,high:req,low:req,middle:req, \
+ tmp0:req,tmp1:req
+
+ pmull v\tmp0\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ pmull2 v\tmp1\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ eor v\middle\().16b,v\middle\().16b,v\tmp0\().16b
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ eor v\middle\().16b,v\middle\().16b,v\tmp1\().16b
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0\().1d
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+
+.endm
+
+/* aadhash=reduction([low,high],poly)+dat0 */
+.macro poly_mult_final_x2 aadhash:req, \
+ high:req,low:req,tmp0:req,tmp1:req, \
+ poly:req
+ pmull2 v\tmp1\().1q,v\high\().2d,v\poly\().2d
+ eor v\low\().16b, v\aadhash\().16b, v\low\().16b
+ eor v\aadhash\().16b,v\aadhash\().16b,v\aadhash\().16b
+ ext v\tmp0\().16b,v\tmp1\().16b,v\aadhash\().16b,8 //high
+ ext v\tmp1\().16b,v\aadhash\().16b,v\tmp1\().16b,8 //low
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ pmull v\tmp1\().1q,v\high\().1d,v\poly\().1d
+ eor v\aadhash\().16b, v\low\().16b, v\tmp1\().16b
+.endm
+
+.macro aes_encrypt_round block,key
+ aese v\block\().16b,v\key\().16b
+ aesmc v\block\().16b,v\block\().16b
+.endm
+
+.macro declare_var_vector_reg name:req,reg:req
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+ d\name .req d\reg
+.endm
+
+.macro declare_var_generic_reg name:req,reg:req
+ \name .req x\reg
+ x\name .req x\reg
+ w\name .req w\reg
+.endm
+
+/*Read data less than 16 */
+.macro read_small_data dest:req,src:req,size:req,tbl_adr:req,tbl:req
+ ldr q\tbl,[\tbl_adr,\size,lsl 4]
+ tbz \size,3,1f
+ ld1 {v\dest\().d}[0],[\src],8
+1:
+ tbz \size,2,1f
+ ld1 {v\dest\().s}[2],[\src],4
+1:
+ tbz \size,1,1f
+ ld1 {v\dest\().h}[6],[\src],2
+1:
+ tbz \size,0,1f
+ ld1 {v\dest\().b}[14],[\src],1
+1:
+ tbl v\dest\().16b,{v\dest\().16b},v\tbl\().16b
+.endm
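+/*
+ * read_small_data copies 1..15 trailing bytes without touching memory past the
+ * buffer: the length is decomposed into 8/4/2/1-byte loads into fixed lanes,
+ * then a tbl shuffle (indexed by size via the lookup table) moves the pieces
+ * into their final positions.
+ */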
+.macro read_small_data_start dest:req,src:req,size:req,tbl_adr:req,tbl:req
+ adrp \tbl_adr,:got:read_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:read_small_data_table]
+ read_small_data \dest,\src,\size,\tbl_adr,\tbl
+.endm
+
+.macro read_small_data_end dest:req,src:req,size:req,tbl_adr:req,tbl:req
+ adrp \tbl_adr,:got:read_end_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:read_end_small_data_table]
+ read_small_data \dest,\src,\size,\tbl_adr,\tbl
+.endm
+
+.macro write_small_data src:req,dest:req,size:req,tbl_adr:req,tmp1:req
+ ldr q\tmp1,[\tbl_adr,\size,lsl 4]
+ tbl v\tmp1\().16b,{v\src\().16b},v\tmp1\().16b
+ tbz \size,3,1f
+ st1 {v\tmp1\().d}[0],[\dest],8
+1:
+ tbz \size,2,1f
+ st1 {v\tmp1\().s}[2],[\dest],4
+1:
+ tbz \size,1,1f
+ st1 {v\tmp1\().h}[6],[\dest],2
+1:
+ tbz \size,0,1f
+ st1 {v\tmp1\().b}[14],[\dest],1
+1:
+.endm
+.macro write_small_data_start src:req,dest:req,size:req,tbl_adr:req,tmp1:req
+ adrp \tbl_adr,:got:write_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:write_small_data_table]
+ write_small_data \src,\dest,\size,\tbl_adr,\tmp1
+.endm
+.macro write_small_data_end src:req,dest:req,size:req,tbl_adr:req,tmp1:req
+ adrp \tbl_adr,:got:write_end_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:write_end_small_data_table]
+ write_small_data \src,\dest,\size,\tbl_adr,\tmp1
+.endm
+
+.macro tbx_small_data_end src:req,dest:req,size:req,tbl_adr:req,tmp1:req
+ adrp \tbl_adr,:got:tbx_end_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:tbx_end_small_data_table]
+ ldr q\tmp1,[\tbl_adr,\size,lsl 4]
+ tbx v\dest\().16b,{v\src\().16b},v\tmp1\().16b
+.endm
+
+.macro tbx_small_data_start src:req,dest:req,size:req,tbl_adr:req,tmp1:req
+ adrp \tbl_adr,:got:tbx_start_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:tbx_start_small_data_table]
+ ldr q\tmp1,[\tbl_adr,\size,lsl 4]
+ tbx v\dest\().16b,{v\src\().16b},v\tmp1\().16b
+.endm
+
+
+.macro clear_small_data dest:req,zero:req,size:req,tbl_adr:req,tmp1:req
+ adrp \tbl_adr,:got:shift_small_data_table
+ ldr \tbl_adr,[\tbl_adr,#:got_lo12:shift_small_data_table]
+ add \tbl_adr,\tbl_adr,16
+ sub \tbl_adr,\tbl_adr,\size
+ ldr q\tmp1,[\tbl_adr]
+ tbx v\dest\().16b,{v\zero\().16b},v\tmp1\().16b
+.endm
+
+
+.macro aes_gcm_n_round is_enc:req,count:req,aadhash:req, dat_addr:req, \
+ hashkey_addr:req, hashkey_base:req, \
+ hashkey:req,hashkey_ext:req,high:req,low:req, poly:req, \
+ ctr:req,enc_ctr:req,one:req,out_adr:req, \
+ tmp0:req,tmp1:req
+
+ ghash_reset_hashkey_addr \hashkey_addr,\hashkey_base,\count
+
+ aes_gcm_init \is_enc,\aadhash,\dat_addr,\hashkey_addr, \
+ \hashkey,\hashkey_ext, \high,\low, \
+ \ctr,\enc_ctr,\one,\out_adr, \
+ \tmp0,\tmp1,\count
+
+ .set left_count,\count - 1
+ .rept left_count
+ aes_gcm_middle \is_enc,\aadhash,\dat_addr,\hashkey_addr, \
+ \hashkey,\hashkey_ext, \high,\low, \
+ \ctr,\enc_ctr,\one,\out_adr, \
+ \tmp0,\tmp1, left_count
+ .set left_count,left_count - 1
+ .endr
+
+ poly_mult_final_x2 \aadhash,\high,\low,\tmp0,\tmp1,\poly
+
+.endm
+
+
+/*
+	aadhash = aadhash * hashkey_base[HASHKEY_TOTAL_NUM-1] + rbit(dat)
+*/
+.macro ghash_block_reg_x2 aadhash:req, dat:req, hashkey_base:req, \
+ hashkey:req,high:req,low:req,tmp0:req, tmp1:req, \
+ tmp2:req,temp0:req
+	ldr	q\hashkey,[\hashkey_base,(HASHKEY_TOTAL_NUM-1)*32+16]
+	eor	v\tmp2\().16b,v\tmp2\().16b,v\tmp2\().16b /* zero */
+	pmull	v\tmp1\().1q,v\aadhash\().1d,v\hashkey\().1d
+	pmull2	v\tmp0\().1q,v\aadhash\().2d,v\hashkey\().2d
+	ldr	q\hashkey,[\hashkey_base,(HASHKEY_TOTAL_NUM-1)*32]
+	eor	v\tmp0\().16b,v\tmp1\().16b,v\tmp0\().16b
+	ext	v\tmp1\().16b,v\tmp2\().16b,v\tmp0\().16b,8 /*low*/
+	ext	v\tmp0\().16b,v\tmp0\().16b,v\tmp2\().16b,8 /*high*/
+	pmull2	v\high\().1q,v\aadhash\().2d,v\hashkey\().2d
+	mov	\temp0,0x87
+	pmull	v\low\().1q,v\aadhash\().1d,v\hashkey\().1d
+	dup	v\tmp2\().2d,\temp0
+	eor	v\high\().16b,v\high\().16b,v\tmp0\().16b
+	eor	v\low\().16b,v\low\().16b,v\tmp1\().16b
+	rbit	v\aadhash\().16b, v\dat\().16b
+	poly_mult_final_x2	\aadhash,\high,\low,\tmp0,\tmp1,\tmp2
+.endm
+
+.macro __generic_load_small_data is_enc:req,len_bit:req,small_read_len:req, \
+ in_adr:req,out_adr:req,partial_block:req,temp0:req,temp1:req,r:req,p
+ tbz \small_read_len,\len_bit,1f
+ ldr\p \r\()\temp0,[\in_adr],1<<\len_bit /*in */
+ ldr\p \r\()\temp1,[\partial_block] /* partial*/
+ eor \r\()\temp1,\r\()\temp0,\r\()\temp1
+ .ifc \is_enc ,decrypt
+ str\p \r\()\temp0,[\partial_block],1<<\len_bit
+ .endif
+ .ifc \is_enc, encrypt
+ str\p \r\()\temp1,[\partial_block],1<<\len_bit
+ .endif
+ str\p \r\()\temp1,[\out_adr],1<<\len_bit
+1:
+.endm
+.macro generic_load_partial_block is_enc:req,small_read_len:req,in_adr:req,out_adr:req, \
+ partial_block:req,temp0:req,temp1:req
+ __generic_load_small_data \is_enc,3,\small_read_len,\in_adr,\out_adr,\partial_block,\temp0,\temp1,x /* small_read_len >=8 */
+ __generic_load_small_data \is_enc,2,\small_read_len,\in_adr,\out_adr,\partial_block,\temp0,\temp1,w /* small_read_len >=4 */
+ __generic_load_small_data \is_enc,1,\small_read_len,\in_adr,\out_adr,\partial_block,\temp0,\temp1,w,h /* small_read_len >=2 */
+ __generic_load_small_data \is_enc,0,\small_read_len,\in_adr,\out_adr,\partial_block,\temp0,\temp1,w,b /* small_read_len >=1 */
+.endm
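+/*
+ * Same idea as read_small_data but with general-purpose loads: the remaining
+ * length is decomposed into 8/4/2/1-byte pieces, each piece of input is XORed
+ * with the bytes stashed in the partial-block slot and written to the output,
+ * and the ciphertext bytes (the XOR result when encrypting, the raw input when
+ * decrypting) are stored back into the partial-block slot.
+ */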
+/* without Neon read version */
+.macro generic_partial_block_start is_enc:req,in_len:req,in_adr:req,out_adr:req,context:req, \
+ partial_block:req,partial_block_len:req,small_read_len:req,left_partial_block_len:req, \
+ temp0:req
+ mov \left_partial_block_len,16
+ add \partial_block,\context,PARTIAL_BLOCK_ENC_KEY_OFF
+ sub \left_partial_block_len,\left_partial_block_len,\partial_block_len
+ add \partial_block,\partial_block,\partial_block_len
+ cmp \in_len,\left_partial_block_len
+ csel \small_read_len,\in_len,\left_partial_block_len, ls
+ add \partial_block_len,\partial_block_len,\small_read_len
+ sub \in_len,\in_len,\small_read_len
+ and \partial_block_len,\partial_block_len,0xf
+ str \partial_block_len,[\context,PARTIAL_BLOCK_LENGTH_OFF]
+ generic_load_partial_block \is_enc,\small_read_len,\in_adr,\out_adr,\partial_block, \
+ \left_partial_block_len,\temp0 /* small_read_len >=8 */
+.endm
+.macro generic_paritial_block_end is_enc:req,in_len:req,in_adr:req,out_adr:req,context:req, \
+ partial_block:req,temp0:req,temp1:req
+ str \in_len,[\context,PARTIAL_BLOCK_LENGTH_OFF]
+ add \partial_block,\context,PARTIAL_BLOCK_ENC_KEY_OFF
+ generic_load_partial_block \is_enc,\in_len,\in_adr,\out_adr,\partial_block,\temp0,\temp1 /* small_read_len >=8 */
+.endm
+/*partial_block_len+in_len < 16,partial_block_len=0,in_len>0 */
+.macro paritial_block_small_length is_enc:req,context:req,in_len:req,in_adr:req,out_adr:req,temp0:req,temp1:req,Ctr:req
+
+	cbz	\in_len,1f
+	ldr	\temp0,[\context,PARTIAL_BLOCK_LENGTH_OFF]
+	add	\temp1,\temp0,\in_len
+	str	\temp1,[\context,PARTIAL_BLOCK_LENGTH_OFF]
+	add	\context,\context,PARTIAL_BLOCK_ENC_KEY_OFF
+	add	\context,\context,\temp0	/* point at the current offset within the partial block */
+2:/* loop start */
+ sub \in_len,\in_len,1
+ ldrb w\temp0,[\in_adr],1
+ ldrb w\temp1,[\context]
+ eor w\temp1,w\temp1,w\temp0
+ strb w\temp1,[\out_adr],1
+.ifc \is_enc , encrypt
+ strb w\temp1,[\context],1
+.endif
+.ifc \is_enc,decrypt
+ strb w\temp0,[\context],1
+.endif
+ cbnz \in_len,2b
+1:/* loop end */
+.endm
+
+/* 0<in_len < 16,partial_block_len=0 */
+.macro paritial_block_end is_enc:req,context:req,in_len:req,in_adr:req,out_adr:req, \
+ temp0:req,partial_block_len:req \
+ PartialBlock:req,ctr:req,one:req,Tmp2:req,Tmp3:req,Tmp4:req
+ add v\ctr\().4s,v\ctr\().4s,v\one\().4s //increase ctr
+ str q\ctr,[context,CTR_OFF]
+ read_small_data_start \PartialBlock,\in_adr,\in_len,\tbl_adr,\Tmp0
+ aes_encrypt_block \ctr
+
+.endm
+ declare_var_vector_reg Key0 ,16
+ declare_var_vector_reg Key1 ,17
+ declare_var_vector_reg Key2 ,18
+ declare_var_vector_reg Key3 ,19
+ declare_var_vector_reg Key4 ,20
+ declare_var_vector_reg Key5 ,21
+ declare_var_vector_reg Key6 ,22
+ declare_var_vector_reg Key7 ,23
+ declare_var_vector_reg Key8 ,24
+ declare_var_vector_reg Key9 ,25
+ declare_var_vector_reg Key10,26
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_128.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_128.S
new file mode 100644
index 000000000..02add91a2
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_128.S
@@ -0,0 +1,165 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#define KEY_LEN 128
+#include "gcm_common.S"
+
+#define KEY_REGS 0,1,2,3,4,5,6,7,8
+.macro aes_encrypt_block block:req
+ aes_encrypt_round \block,Key0
+ aes_encrypt_round \block,Key1
+ aes_encrypt_round \block,Key2
+ aes_encrypt_round \block,Key3
+ aes_encrypt_round \block,Key4
+ aes_encrypt_round \block,Key5
+ aes_encrypt_round \block,Key6
+ aes_encrypt_round \block,Key7
+ aes_encrypt_round \block,Key8
+ aese v\block\().16b,vKey9.16b
+ eor v\block\().16b,v\block\().16b,vKey10.16b
+.endm
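+/*
+ * AES-128 uses 11 round keys (vKey0..vKey10): ten aese steps, the last of which
+ * skips MixColumns (no aesmc), followed by a plain eor with vKey10 as the final
+ * AddRoundKey.
+ */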
+
+/*
+	Load the AES-128 round keys into [vKey0..vKey10]
+ */
+.macro load_aes_keys key_addr:req
+ ld1 {vKey0.4s- vKey3.4s},[\key_addr],64
+ ld1 {vKey4.4s- vKey7.4s},[\key_addr],64
+ ldp qKey8,qKey9,[\key_addr],32
+ ldr qKey10,[\key_addr],15*16 - 128 - 32
+.endm
+
+
+
+/*
+ [low,middle,tmp0,high] +=aadhash * [hashkey0,hashkey0_ext]
+ dat=*dat_adr
+ enc_dat=aes_encrypt(ctr)^dat
+ aadhash=rbit(enc_dat)
+ [hashkey0,hashkey0_ext] = *hashkey_adr
+ dat_adr+=16
+ hashkey_adr+=32
+*/
+.macro aes_gcm_middle is_enc:req,aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req,high:req,low:req, \
+ ctr:req,enc_ctr:req,one:req,out_adr:req, \
+ tmp0:req,tmp1:req,left_count:req
+
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0\().1d
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],16
+ .endif
+
+ add v\ctr\().4s,v\ctr\().4s,v\one\().4s //increase ctr
+
+ rev32 v\enc_ctr\().16b,v\ctr\().16b
+ aes_encrypt_round \enc_ctr,Key0
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ pmull v\tmp0\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ pmull2 v\tmp1\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ .endif
+ eor v\aadhash\().16b,v\aadhash\().16b,v\aadhash\().16b
+ aes_encrypt_round \enc_ctr,Key1
+ aes_encrypt_round \enc_ctr,Key2
+ eor v\tmp0\().16b,v\tmp1\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key3
+ ext v\tmp1\().16b,v\aadhash\().16b,v\tmp0\().16b,8
+ ext v\tmp0\().16b,v\tmp0\().16b,v\aadhash\().16b,8
+ aes_encrypt_round \enc_ctr,Key4
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key5
+ ldr q\aadhash,[\dat_adr],16
+ aes_encrypt_round \enc_ctr,Key6
+ aes_encrypt_round \enc_ctr,Key7
+ aes_encrypt_round \enc_ctr,Key8
+ aese v\enc_ctr\().16b,vKey9.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,vKey10.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,v\aadhash\().16b
+ .ifc \is_enc, encrypt
+ rbit v\aadhash\().16b,v\enc_ctr\().16b
+ .endif
+ .ifc \is_enc , decrypt
+ rbit v\aadhash\().16b,v\aadhash\().16b
+ .endif
+ str q\enc_ctr,[\out_adr],16
+.endm
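+/*
+ * aes_gcm_middle interleaves the two dependency chains on purpose: the
+ * aese/aesmc rounds for the next counter block are issued between the pmull/eor
+ * steps that fold the previous block into the GHASH accumulators, hiding the
+ * latency of both.
+ */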
+
+.macro aes_gcm_init is_enc:req,aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req, high:req,low:req, \
+ ctr:req,enc_ctr:req,one:req,out_adr:req, \
+ tmp0:req,tmp1:req,left_count:req
+ ldr q\hashkey0,[\hashkey_adr],16
+ add v\ctr\().4s,v\ctr\().4s,v\one\().4s //increase ctr
+ rev32 v\enc_ctr\().16b,v\ctr\().16b
+ aes_encrypt_round \enc_ctr,Key0
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ aes_encrypt_round \enc_ctr,Key1
+ pmull2 v\high\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ pmull v\low\().1q,v\aadhash\().1d,v\hashkey0\().1d
+
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],16
+ .endif
+ aes_encrypt_round \enc_ctr,Key2
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ eor v\aadhash\().16b,v\aadhash\().16b,v\aadhash\().16b
+
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ .endif
+ aes_encrypt_round \enc_ctr,Key3
+ eor v\tmp0\().16b,v\tmp1\().16b,v\tmp0\().16b
+
+ aes_encrypt_round \enc_ctr,Key4
+ ext v\tmp1\().16b,v\aadhash\().16b,v\tmp0\().16b,8 //low
+ ext v\tmp0\().16b,v\tmp0\().16b,v\aadhash\().16b,8 //high
+ aes_encrypt_round \enc_ctr,Key5
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key6
+ ldr q\aadhash,[\dat_adr],16
+ aes_encrypt_round \enc_ctr,Key7
+ aes_encrypt_round \enc_ctr,Key8
+ aese v\enc_ctr\().16b,vKey9.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,vKey10.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,v\aadhash\().16b
+ .ifc \is_enc , encrypt
+ rbit v\aadhash\().16b,v\enc_ctr\().16b
+ .endif
+ .ifc \is_enc , decrypt
+ rbit v\aadhash\().16b,v\aadhash\().16b
+ .endif
+ str q\enc_ctr,[\out_adr],16
+.endm
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_256.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_256.S
new file mode 100644
index 000000000..fb6a6e94d
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_common_256.S
@@ -0,0 +1,181 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#define KEY_LEN 256
+#include "gcm_common.S"
+ declare_var_vector_reg Key11,27
+ declare_var_vector_reg Key12,28
+ declare_var_vector_reg Key13,29
+ declare_var_vector_reg Key14,30
+#define KEY_REGS 0,1,2,3,4,5,6,7,8,9,10,11,12
+.macro aes_encrypt_block block:req
+ aes_encrypt_round \block,Key0
+ aes_encrypt_round \block,Key1
+ aes_encrypt_round \block,Key2
+ aes_encrypt_round \block,Key3
+ aes_encrypt_round \block,Key4
+ aes_encrypt_round \block,Key5
+ aes_encrypt_round \block,Key6
+ aes_encrypt_round \block,Key7
+ aes_encrypt_round \block,Key8
+ aes_encrypt_round \block,Key9
+ aes_encrypt_round \block,Key10
+ aes_encrypt_round \block,Key11
+ aes_encrypt_round \block,Key12
+ aese v\block\().16b,vKey13.16b
+ eor v\block\().16b,v\block\().16b,vKey14.16b
+.endm
+
+/*
+	Load the AES-256 round keys into [vKey0..vKey14]
+ */
+.macro load_aes_keys key_addr:req
+ ld1 { vKey0.4s- vKey3.4s},[\key_addr],64
+ ld1 { vKey4.4s- vKey7.4s},[\key_addr],64
+ ld1 { vKey8.4s- vKey11.4s},[\key_addr],64
+ ld1 {vKey12.4s- vKey14.4s},[\key_addr],48
+.endm
+
+
+
+/*
+ [low,middle,tmp0,high] +=aadhash * [hashkey0,hashkey0_ext]
+ dat=*dat_adr
+ enc_dat=aes_encrypt(ctr)^dat
+ aadhash=rbit(enc_dat)
+ [hashkey0,hashkey0_ext] = *hashkey_adr
+ dat_adr+=16
+ hashkey_adr+=32
+*/
+.macro aes_gcm_middle is_enc:req,aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req,high:req,low:req, \
+ ctr:req,enc_ctr:req,one:req,out_adr:req, \
+ tmp0:req,tmp1:req,left_count:req
+
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0\().1d
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],16
+ .endif
+
+ add v\ctr\().4s,v\ctr\().4s,v\one\().4s //increase ctr
+
+ rev32 v\enc_ctr\().16b,v\ctr\().16b
+ aes_encrypt_round \enc_ctr,Key0
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ pmull v\tmp0\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ pmull2 v\tmp1\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ .endif
+ eor v\aadhash\().16b,v\aadhash\().16b,v\aadhash\().16b
+ aes_encrypt_round \enc_ctr,Key1
+ aes_encrypt_round \enc_ctr,Key2
+ eor v\tmp0\().16b,v\tmp1\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key3
+ ext v\tmp1\().16b,v\aadhash\().16b,v\tmp0\().16b,8
+ ext v\tmp0\().16b,v\tmp0\().16b,v\aadhash\().16b,8
+ aes_encrypt_round \enc_ctr,Key4
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key5
+ ldr q\aadhash,[\dat_adr],16
+ aes_encrypt_round \enc_ctr,Key6
+ aes_encrypt_round \enc_ctr,Key7
+ aes_encrypt_round \enc_ctr,Key8
+ aes_encrypt_round \enc_ctr,Key9
+ aes_encrypt_round \enc_ctr,Key10
+ aes_encrypt_round \enc_ctr,Key11
+ aes_encrypt_round \enc_ctr,Key12
+ aese v\enc_ctr\().16b,vKey13.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,vKey14.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,v\aadhash\().16b
+ .ifc \is_enc , encrypt
+ rbit v\aadhash\().16b,v\enc_ctr\().16b
+ .endif
+ .ifc \is_enc , decrypt
+ rbit v\aadhash\().16b,v\aadhash\().16b
+ .endif
+ str q\enc_ctr,[\out_adr],16
+.endm
+
+.macro aes_gcm_init is_enc:req,aadhash:req,dat_adr:req,hashkey_adr:req, \
+ hashkey0:req,hashkey0_ext:req, high:req,low:req, \
+ ctr:req,enc_ctr:req,one:req,out_adr:req, \
+ tmp0:req,tmp1:req,left_count:req
+ ldr q\hashkey0,[\hashkey_adr],16
+ add v\ctr\().4s,v\ctr\().4s,v\one\().4s /*increase ctr */
+ rev32 v\enc_ctr\().16b,v\ctr\().16b
+ aes_encrypt_round \enc_ctr,Key0
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ aes_encrypt_round \enc_ctr,Key1
+ pmull2 v\high\().1q,v\aadhash\().2d,v\hashkey0\().2d
+ pmull v\low\().1q,v\aadhash\().1d,v\hashkey0\().1d
+
+ .if \left_count > 1
+ ldr q\hashkey0,[\hashkey_adr],16
+ .endif
+ aes_encrypt_round \enc_ctr,Key2
+ pmull v\tmp1\().1q,v\aadhash\().1d,v\hashkey0_ext\().1d
+ pmull2 v\tmp0\().1q,v\aadhash\().2d,v\hashkey0_ext\().2d
+ eor v\aadhash\().16b,v\aadhash\().16b,v\aadhash\().16b
+
+ .if \left_count > 1
+ ldr q\hashkey0_ext,[\hashkey_adr],16
+ .endif
+ aes_encrypt_round \enc_ctr,Key3
+ eor v\tmp0\().16b,v\tmp1\().16b,v\tmp0\().16b
+
+ aes_encrypt_round \enc_ctr,Key4
+ ext v\tmp1\().16b,v\aadhash\().16b,v\tmp0\().16b,8 /*low */
+ ext v\tmp0\().16b,v\tmp0\().16b,v\aadhash\().16b,8 /* high */
+ aes_encrypt_round \enc_ctr,Key5
+ eor v\low\().16b,v\low\().16b,v\tmp1\().16b
+ eor v\high\().16b,v\high\().16b,v\tmp0\().16b
+ aes_encrypt_round \enc_ctr,Key6
+ ldr q\aadhash,[\dat_adr],16
+ aes_encrypt_round \enc_ctr,Key7
+ aes_encrypt_round \enc_ctr,Key8
+ aes_encrypt_round \enc_ctr,Key9
+ aes_encrypt_round \enc_ctr,Key10
+ aes_encrypt_round \enc_ctr,Key11
+ aes_encrypt_round \enc_ctr,Key12
+ aese v\enc_ctr\().16b,vKey13.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,vKey14.16b
+ eor v\enc_ctr\().16b,v\enc_ctr\().16b,v\aadhash\().16b
+ .ifc \is_enc , encrypt
+ rbit v\aadhash\().16b,v\enc_ctr\().16b
+ .endif
+ .ifc \is_enc , decrypt
+ rbit v\aadhash\().16b,v\aadhash\().16b
+ .endif
+ str q\enc_ctr,[\out_adr],16
+.endm
+
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_enc_dec.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_enc_dec.S
new file mode 100644
index 000000000..927179cfc
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_enc_dec.S
@@ -0,0 +1,588 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+/*
+void gist_aes_gcm_dec_##mode( \
+ const struct gcm_key_data *key_data, \
+ struct gcm_context_data *context, \
+ uint8_t *out, \
+ uint8_t const *in, \
+ uint64_t len, \
+ uint8_t *iv, \
+ \
+ uint8_t const *aad, \
+ uint64_t aad_len, \
+ uint8_t *auth_tag, \
+ uint64_t auth_tag_len \
+ \
+ )
+ */
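+/*
+ * Register note (matching the declarations below): x0..x7 carry key_data,
+ * context, out, in, len, iv, aad and aad_len on entry; x0 and x5 are later
+ * reused as hashkey_base and hashkey_addr, and the auth_tag/auth_tag_len
+ * aliases on x0/x1 are only used once the original arguments are dead.
+ */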
+
+ declare_var_generic_reg key_data ,0
+ declare_var_generic_reg context ,1
+ declare_var_generic_reg out ,2
+ declare_var_generic_reg in ,3
+ declare_var_generic_reg len ,4
+ declare_var_generic_reg iv ,5
+ declare_var_generic_reg aad ,6
+ declare_var_generic_reg aad_len ,7
+
+ declare_var_generic_reg hashkey_base,0
+ declare_var_generic_reg hashkey_addr,5
+ declare_var_generic_reg left_len ,12
+ declare_var_generic_reg aad_left ,13
+ declare_var_generic_reg temp0 ,14
+ declare_var_generic_reg temp1 ,15
+
+ declare_var_generic_reg auth_tag ,0 /* input param */
+ declare_var_generic_reg auth_tag_len,1 /* input param */
+
+
+ declare_var_vector_reg Ctr,0
+ declare_var_vector_reg AadHash,1
+ declare_var_vector_reg HashKey0,2
+ declare_var_vector_reg HashKey0Ext,3
+ declare_var_vector_reg High,4
+ declare_var_vector_reg Low,5
+ declare_var_vector_reg EncCtr,6
+ declare_var_vector_reg Dat0,6
+ declare_var_vector_reg Middle0,7
+
+ declare_var_vector_reg Tmp0,8
+ declare_var_vector_reg Tmp1,9
+ declare_var_vector_reg Zero,10
+ declare_var_vector_reg Poly,11
+ declare_var_vector_reg LeftDat ,12
+ declare_var_vector_reg Len ,13
+ declare_var_vector_reg Tmp2,14
+ declare_var_vector_reg Tmp3,15
+
+ declare_var_vector_reg One,31
+ .set stack_size,64
+ .macro push_stack
+ stp d8, d9,[sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ stp d14,d15,[sp,48]
+
+ .endm
+
+ .macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d14,d15,[sp,48]
+ ldp d8, d9, [sp], stack_size
+ .endm
+
+START_FUNC(enc,KEY_LEN,_)
+START_FUNC(enc,KEY_LEN,_nt_)
+ push_stack
+ /*save in_length and aad_length*/
+ stp aad_len,len,[context,AAD_LEN_OFF]
+ load_aes_keys key_data
+ /* Init Consts and IV */
+ mov wtemp1,1
+ eor vOne.16b,vOne.16b,vOne.16b
+ ld1 {vCtr.d}[0],[iv],8
+ eor vZero.16b,vZero.16b,vZero.16b
+ ld1 {vCtr.s}[2],[iv]
+ mov temp0,0x87
+ rev32 vCtr.16b,vCtr.16b /* to cpu order */
+ ins vOne.s[3],wtemp1
+ mov vAadHash.16b,vZero.16b
+ dup vPoly.2d,temp0
+ ins vCtr.s[3],wtemp1 /* Initial Ctr and Orig IV */
+
+
+ and left_len,aad_len,0xf
+ cbz aad_len,24f
+ lsr aad_len,aad_len,4
+ /* Read small data */
+ cbz left_len,2f /* aad_len >= 16,skip */
+ add aad_left,aad,aad_len,lsl 4
+ read_small_data_start LeftDat,aad_left,left_len,temp0,Tmp0
+ cbnz left_len,1f /* aad_len & 0xf != 0 */
+2:
+ cbz aad_len,1f /* aad_len <16 skip*/
+ /* left_len == 0 && aad_len !=0 */
+ sub aad_len,aad_len,1
+ /* leftDat = aad[-1] */
+ ldr qLeftDat,[aad,aad_len,lsl 4]
+1:
+ cbnz aad_len,1f /* aad_len >16,skip */
+ rbit vAadHash.16b,vLeftDat.16b
+	b	24f	/* aad_len <= 16, skip aadhash calculation */
+1:
+ /* aad_len > 16 */
+ ldr qAadHash,[aad],16
+ rbit vAadHash.16b,vAadHash.16b
+ sub aad_len,aad_len,1
+
+1:
+ /* loop ghash_block */
+ cmp aad_len,HASHKEY_TOTAL_NUM - 1
+ bls 1f // break loop
+ sub aad_len,aad_len,HASHKEY_TOTAL_NUM
+ ghash_block_n HASHKEY_TOTAL_NUM,AadHash,Dat0,aad,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0,Tmp1
+ b 1b /* back to loop start */
+1:
+ cbnz aad_len,1f /* left aad_len >32,skip */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32]
+ ghash_block_reg AadHash,LeftDat, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0
+ b 24f /* left aad_len <=32,skip below check */
+1:
+ mov temp0,HASHKEY_TOTAL_NUM - 1
+ sub temp0,temp0,aad_len
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ ghash_mult_init_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Dat0,2 /* load next hash */
+ sub aad_len,aad_len,1
+
+1:
+ cbz aad_len,1f
+ ghash_mult_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Tmp1,Dat0, 2
+
+ sub aad_len,aad_len,1
+ b 1b
+1:
+ ghash_mult_round_noload AadHash,HashKey0,HashKey0Ext,High,Low,Middle0,Tmp0,Tmp1
+ rbit vAadHash.16b, vLeftDat.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+24:
+
+ /* Enc/Dec loop */
+ and left_len,len,15
+ cbz len,24f
+ lsr len,len,4
+1:
+ /* loop aes gcm enc/dec loop */
+ cmp len,HASHKEY_TOTAL_NUM - 1
+ bls 1f // break loop
+ sub len,len,HASHKEY_TOTAL_NUM
+ aes_gcm_n_round encrypt,HASHKEY_TOTAL_NUM,AadHash,in,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Poly, \
+ Ctr,EncCtr,One,out,Tmp0,Tmp1
+ b 1b /* back to loop start */
+1:
+ cbz len,24f /* left len == 0 */
+ mov temp0,HASHKEY_TOTAL_NUM
+ sub temp0,temp0,len
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ sub len,len,1
+ aes_gcm_init encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ cbz len,2f
+ sub len,len,1
+1:
+
+ cbz len,1f
+ aes_gcm_middle encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ sub len,len,1
+ b 1b
+1:
+ aes_gcm_middle encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,1 /* load next hash */
+2:
+ poly_mult_final_x2 AadHash,High,Low,Tmp0,Tmp1,Poly
+24:
+ /* complete part */
+ cmp left_len,0
+ movi vHigh.16b,0
+ mov temp0,HASHKEY_TOTAL_NUM-3
+ movi vLow.16b,0
+ cinc hashkey_addr,temp0,eq
+ movi vMiddle0.16b,0
+ add hashkey_addr,hashkey_base,hashkey_addr,lsl 5
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr],32
+ beq 2f
+ read_small_data_start LeftDat,in,left_len,temp0,Tmp0
+ add vCtr.4s,vCtr.4s,vOne.4s
+ rev32 vEncCtr.16b,vCtr.16b
+ aes_encrypt_round EncCtr,Key0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round EncCtr,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round EncCtr,Key2
+ ldr qHashKey0,[hashkey_addr],16
+ aes_encrypt_round EncCtr,Key3
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round EncCtr,Key4
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round EncCtr,Key5
+ ldr qHashKey0Ext,[hashkey_addr],16
+ aes_encrypt_round EncCtr,Key6
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aes_encrypt_round EncCtr,Key7
+ aes_encrypt_round EncCtr,Key8
+#if KEY_LEN==256
+ aes_encrypt_round EncCtr,Key9
+ aes_encrypt_round EncCtr,Key10
+ aes_encrypt_round EncCtr,Key11
+ aes_encrypt_round EncCtr,Key12
+ aese vEncCtr.16b,vKey13.16b
+ eor vEncCtr.16b,vEncCtr.16b,vKey14.16b
+#else
+ aese vEncCtr.16b,vKey9.16b
+ eor vEncCtr.16b,vEncCtr.16b,vKey10.16b
+#endif
+ eor vEncCtr.16b,vEncCtr.16b,vLeftDat.16b
+ write_small_data_start EncCtr,out,left_len,temp0,Tmp0
+ clear_small_data EncCtr,Zero,left_len,temp0,Tmp0
+ rbit vAadHash.16b,vEncCtr.16b
+2:
+
+ ldr qLen,[context,AAD_LEN_OFF] /* Len */
+ mov wtemp0,1 /* Ek */
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d /* auth_dat * HashKey[Total-2] */
+ shl vLen.2d,vLen.2d,3 /* Len */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d /* auth_dat * HashKey[Total-2] */
+ rev64 vLen.16b,vLen.16b /* Len */
+ ins vCtr.4s[3],wtemp0 /* Ek */
+ ldr qHashKey0,[hashkey_addr],16 /* auth_dat * HashKey[Total-2] */
+ pmull vTmp2.1q,vAadHash.1d,vHashKey0Ext.1d /* auth_dat * HashKey[Total-2] */
+ rev32 vEncCtr.16b,vCtr.16b /* Ek */
+ eor vHigh.16b,vHigh.16b,vTmp0.16b /* auth_dat * HashKey[Total-2] */
+ pmull2 vTmp3.1q ,vAadHash.2d,vHashKey0Ext.2d /* auth_dat * HashKey[Total-2] */
+ rbit vAadHash.16b,vLen.16b /* Len */
+
+ aes_encrypt_round EncCtr,Key0 /* Ek */
+ eor vLow.16b,vLow.16b,vTmp1.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key1 /* Ek */
+ ldr qHashKey0Ext,[hashkey_addr],16 /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key2 /* Ek */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key3 /* Ek */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key4 /* Ek */
+
+ pmull2 vTmp0.1q,vAadHash.2d,vHashKey0.2d /* Len * HashKey[Total-1] */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key5 /* Ek */
+ aes_encrypt_round EncCtr,Key6 /* Ek */
+ pmull vTmp2.1q,vAadHash.1d,vHashKey0Ext.1d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key7 /* Ek */
+ eor vHigh.16b,vHigh.16b,vTmp0.16b /* Len * HashKey[Total-1] */
+ pmull2 vTmp3.1q ,vAadHash.2d,vHashKey0Ext.2d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key8 /* Ek */
+ eor vLow.16b,vLow.16b,vTmp1.16b /* Len * HashKey[Total-1] */
+#if KEY_LEN==256
+ aes_encrypt_round EncCtr,Key9 /* Ek */
+ aes_encrypt_round EncCtr,Key10 /* Ek */
+ aes_encrypt_round EncCtr,Key11 /* Ek */
+ aes_encrypt_round EncCtr,Key12 /* Ek */
+ aese vEncCtr.16b,vKey13.16b /* Ek */
+ eor vEncCtr.16b,vEncCtr.16b,vKey14.16b /* Ek */
+#else
+ aese vEncCtr.16b,vKey9.16b /* Ek */
+ eor vEncCtr.16b,vEncCtr.16b,vKey10.16b /* Ek */
+#endif
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b /* Len * HashKey[Total-1] */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b /* Len * HashKey[Total-1] */
+ rbit vAadHash.16b,vEncCtr.16b /* Aad */
+
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ ldp auth_tag,auth_tag_len,[sp,stack_size] /* Adjust here : TODO TBD */
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+
+
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+END_FUNC(enc,KEY_LEN,_)
+END_FUNC(enc,KEY_LEN,_nt_)
+
+
+START_FUNC(dec,KEY_LEN,_)
+START_FUNC(dec,KEY_LEN,_nt_)
+ push_stack
+ /* save in_length and aad_length */
+ stp aad_len,len,[context,AAD_LEN_OFF]
+ load_aes_keys key_data
+ /* Init Consts and IV */
+ mov wtemp1,1
+ eor vOne.16b,vOne.16b,vOne.16b
+ ld1 {vCtr.d}[0],[iv],8
+ eor vZero.16b,vZero.16b,vZero.16b
+ ld1 {vCtr.s}[2],[iv]
+ mov temp0,0x87
+ rev32 vCtr.16b,vCtr.16b /* to cpu order */
+ mov vAadHash.16b,vZero.16b
+ ins vOne.s[3],wtemp1
+ dup vPoly.2d,temp0
+ ins vCtr.s[3],wtemp1 /* Initial Ctr and Orig IV */
+
+ ldp qHashKey0,qHashKey0Ext,[hashkey_base]
+ and left_len,aad_len,0xf
+ cbz aad_len,24f
+ lsr aad_len,aad_len,4
+ /* Read small data */
+ cbz left_len,2f /* aad_len >= 16,skip */
+ add aad_left,aad,aad_len,lsl 4
+ read_small_data_start LeftDat,aad_left,left_len,temp0,Tmp0
+ cbnz left_len,1f /* aad_len & 0xf != 0 */
+2:
+ cbz aad_len,1f /* aad_len <16 skip */
+ /* left_len == 0 && aad_len !=0 */
+ sub aad_len,aad_len,1
+ /* leftDat = aad[-1] */
+ ldr qLeftDat,[aad,aad_len,lsl 4]
+1:
+ cbnz aad_len,1f /* aad_len >16,skip */
+ rbit vAadHash.16b,vLeftDat.16b
+	b	24f	/* aad_len <= 16, skip aadhash calculation */
+1:
+ /* aad_len > 16 */
+ ldr qAadHash,[aad],16
+ rbit vAadHash.16b,vAadHash.16b
+ sub aad_len,aad_len,1
+
+1:
+	/* loop ghash_block */
+ cmp aad_len,HASHKEY_TOTAL_NUM - 1
+ bls 1f /* break loop */
+ sub aad_len,aad_len,HASHKEY_TOTAL_NUM
+ ghash_block_n HASHKEY_TOTAL_NUM,AadHash,Dat0,aad,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0,Tmp1
+ b 1b /* back to loop start */
+1:
+ cbnz aad_len,1f /* left aad_len >32,skip */
+ ldp qHashKey0,qHashKey0Ext,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32]
+ ghash_block_reg AadHash,LeftDat, \
+ HashKey0,HashKey0Ext,High,Low,Middle0,Zero,Poly , \
+ Tmp0
+ b 24f /* left aad_len <=32,skip below check */
+1:
+ mov temp0,HASHKEY_TOTAL_NUM - 1
+ sub temp0,temp0,aad_len
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ ghash_mult_init_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Dat0,2 /* load next hash */
+ sub aad_len,aad_len,1
+
+1:
+ cbz aad_len,1f
+ ghash_mult_round AadHash,aad,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Middle0,Tmp0,Tmp1,Dat0, 2
+
+ sub aad_len,aad_len,1
+ b 1b
+1:
+ ghash_mult_round_noload AadHash,HashKey0,HashKey0Ext,High,Low,Middle0,Tmp0,Tmp1
+ rbit vAadHash.16b, vLeftDat.16b
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+24:
+
+
+ /* Enc/Dec loop */
+ and left_len,len,15
+ cbz len,24f
+ lsr len,len,4
+1:
+ /* loop aes gcm enc/dec loop */
+ cmp len,HASHKEY_TOTAL_NUM - 1
+ bls 1f // break loop
+ sub len,len,HASHKEY_TOTAL_NUM
+ aes_gcm_n_round decrypt,HASHKEY_TOTAL_NUM,AadHash,in,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Poly, \
+ Ctr,EncCtr,One,out,Tmp0,Tmp1
+ b 1b /* back to loop start */
+1:
+ cbz len,24f /* left len == 0 */
+ mov temp0,HASHKEY_TOTAL_NUM
+ sub temp0,temp0,len
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ sub len,len,1
+ aes_gcm_init decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ cbz len,2f
+ sub len,len,1
+1:
+
+ cbz len,1f
+ aes_gcm_middle decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ sub len,len,1
+ b 1b
+1:
+ aes_gcm_middle decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,1 /* load next hash */
+2:
+ poly_mult_final_x2 AadHash,High,Low,Tmp0,Tmp1,Poly
+24:
+ /* complete part */
+ cmp left_len,0
+ movi vHigh.16b,0
+ mov temp0,21
+ movi vLow.16b,0
+ cinc hashkey_addr,temp0,eq
+ movi vMiddle0.16b,0
+ add hashkey_addr,hashkey_base,hashkey_addr,lsl 5
+ ldp qHashKey0,qHashKey0Ext,[hashkey_addr],32
+ beq 2f
+ read_small_data_start LeftDat,in,left_len,temp0,Tmp0
+ add vCtr.4s,vCtr.4s,vOne.4s
+ rev32 vEncCtr.16b,vCtr.16b
+ aes_encrypt_round EncCtr,Key0
+ pmull2 vHigh.1q,vAadHash.2d,vHashKey0.2d
+ aes_encrypt_round EncCtr,Key1
+ pmull vLow.1q ,vAadHash.1d,vHashKey0.1d
+ aes_encrypt_round EncCtr,Key2
+ ldr qHashKey0,[hashkey_addr],16
+ aes_encrypt_round EncCtr,Key3
+ pmull vMiddle0.1q,vAadHash.1d,vHashKey0Ext.1d
+ aes_encrypt_round EncCtr,Key4
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0Ext.2d
+ aes_encrypt_round EncCtr,Key5
+ ldr qHashKey0Ext,[hashkey_addr],16
+ aes_encrypt_round EncCtr,Key6
+ eor vMiddle0.16b,vMiddle0.16b,vTmp0.16b
+ aes_encrypt_round EncCtr,Key7
+ aes_encrypt_round EncCtr,Key8
+#if KEY_LEN==256
+ aes_encrypt_round EncCtr,Key9
+ aes_encrypt_round EncCtr,Key10
+ aes_encrypt_round EncCtr,Key11
+ aes_encrypt_round EncCtr,Key12
+ aese vEncCtr.16b,vKey13.16b
+ eor vEncCtr.16b,vEncCtr.16b,vKey14.16b
+ eor vEncCtr.16b,vEncCtr.16b,vLeftDat.16b
+#endif
+#if KEY_LEN==128
+ aese vEncCtr.16b,vKey9.16b
+ eor vEncCtr.16b,vEncCtr.16b,vKey10.16b
+ eor vEncCtr.16b,vEncCtr.16b,vLeftDat.16b
+#endif
+ write_small_data_start EncCtr,out,left_len,temp0,Tmp0
+ rbit vAadHash.16b,vLeftDat.16b
+
+2:
+
+ ldr qLen,[context,AAD_LEN_OFF] /* Len */
+ mov wtemp0,1 /* Ek */
+ pmull2 vTmp0.1q ,vAadHash.2d,vHashKey0.2d /* auth_dat * HashKey[Total-2] */
+ shl vLen.2d,vLen.2d,3 /* Len */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d /* auth_dat * HashKey[Total-2] */
+ rev64 vLen.16b,vLen.16b /* Len */
+ ins vCtr.4s[3],wtemp0 /* Ek */
+ ldr qHashKey0,[hashkey_addr],16 /* auth_dat * HashKey[Total-2] */
+ pmull vTmp2.1q,vAadHash.1d,vHashKey0Ext.1d /* auth_dat * HashKey[Total-2] */
+ rev32 vEncCtr.16b,vCtr.16b /* Ek */
+ eor vHigh.16b,vHigh.16b,vTmp0.16b /* auth_dat * HashKey[Total-2] */
+ pmull2 vTmp3.1q ,vAadHash.2d,vHashKey0Ext.2d /* auth_dat * HashKey[Total-2] */
+ rbit vAadHash.16b,vLen.16b /* Len */
+
+ aes_encrypt_round EncCtr,Key0 /* Ek */
+ eor vLow.16b,vLow.16b,vTmp1.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key1 /* Ek */
+ ldr qHashKey0Ext,[hashkey_addr],16 /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key2 /* Ek */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key3 /* Ek */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b /* auth_dat * HashKey[Total-2] */
+ aes_encrypt_round EncCtr,Key4 /* Ek */
+
+ pmull2 vTmp0.1q,vAadHash.2d,vHashKey0.2d /* Len * HashKey[Total-1] */
+ pmull vTmp1.1q ,vAadHash.1d,vHashKey0.1d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key5 /* Ek */
+ aes_encrypt_round EncCtr,Key6 /* Ek */
+ pmull vTmp2.1q,vAadHash.1d,vHashKey0Ext.1d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key7 /* Ek */
+ eor vHigh.16b,vHigh.16b,vTmp0.16b /* Len * HashKey[Total-1] */
+ pmull2 vTmp3.1q ,vAadHash.2d,vHashKey0Ext.2d /* Len * HashKey[Total-1] */
+ aes_encrypt_round EncCtr,Key8 /* Ek */
+ eor vLow.16b,vLow.16b,vTmp1.16b /* Len * HashKey[Total-1] */
+#if KEY_LEN==256
+ aes_encrypt_round EncCtr,Key9 /* Ek */
+ aes_encrypt_round EncCtr,Key10 /* Ek */
+ aes_encrypt_round EncCtr,Key11 /* Ek */
+ aes_encrypt_round EncCtr,Key12 /* Ek */
+ aese vEncCtr.16b,vKey13.16b /* Ek */
+ eor vEncCtr.16b,vEncCtr.16b,vKey14.16b /* Ek */
+#else
+ aese vEncCtr.16b,vKey9.16b /* Ek */
+ eor vEncCtr.16b,vEncCtr.16b,vKey10.16b /* Ek */
+#endif
+ eor vMiddle0.16b,vMiddle0.16b,vTmp2.16b /* Len * HashKey[Total-1] */
+ eor vMiddle0.16b,vMiddle0.16b,vTmp3.16b /* Len * HashKey[Total-1] */
+ rbit vAadHash.16b,vEncCtr.16b /* Aad */
+
+ ghash_mult_final_round AadHash,High,Low,Middle0,Tmp0,Zero,Poly
+
+ ldp auth_tag,auth_tag_len,[sp,stack_size] /* Adjust here : TODO TBD */
+ rbit vAadHash.16b,vAadHash.16b /* Aad */
+
+
+ /* output auth_tag */
+ cmp auth_tag_len,16
+ bne 1f
+ /* most likely auth_tag_len=16 */
+ str qAadHash,[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=12 */
+ cmp auth_tag_len,12
+ bne 1f
+ str dAadHash,[auth_tag],8
+ st1 {vAadHash.s}[2],[auth_tag]
+ pop_stack
+ ret
+1: /* auth_tag_len=8 */
+ str dAadHash,[auth_tag]
+ pop_stack
+ ret
+END_FUNC(dec,KEY_LEN,_)
+END_FUNC(dec,KEY_LEN,_nt_)
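The one-shot entry points built from this template (aes_gcm_enc_128/aes_gcm_dec_128 and the
256-bit variants) take the arguments listed in the prototype comment above: key data, context,
output, input, length, a 12-byte IV (the code loads 8 + 4 bytes of it), the AAD, and the tag
buffer. A minimal caller sketch in C, assuming the public isa-l_crypto header <aes_gcm.h> and
that aes_gcm_pre_128() has already filled the key and hashkey tables; any name not present in
this patch is an assumption, not part of the diff:

    #include <stdint.h>
    #include <aes_gcm.h>   /* isa-l_crypto public API (assumed include path) */

    /* Sketch: encrypt pt[0..len) into ct and produce a 16-byte tag. */
    static void gcm128_seal(const uint8_t key[16], uint8_t iv[12],
                            const uint8_t *aad, uint64_t aad_len,
                            const uint8_t *pt, uint64_t len,
                            uint8_t *ct, uint8_t tag[16])
    {
            struct gcm_key_data key_data;
            struct gcm_context_data ctx;

            aes_gcm_pre_128(key, &key_data);   /* key expansion + hashkey precompute */
            aes_gcm_enc_128(&key_data, &ctx, ct, pt, len,
                            iv, aad, aad_len, tag, 16);
    }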
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_multibinary_aarch64.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_multibinary_aarch64.S
new file mode 100644
index 000000000..b5433a1df
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_multibinary_aarch64.S
@@ -0,0 +1,58 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "aarch64_multibinary.h"
+
+mbin_interface aes_gcm_enc_128
+mbin_interface aes_gcm_dec_128
+mbin_interface aes_gcm_precomp_128
+mbin_interface aes_gcm_enc_256
+mbin_interface aes_gcm_dec_256
+mbin_interface aes_gcm_precomp_256
+
+
+mbin_interface aes_gcm_enc_128_update
+mbin_interface aes_gcm_enc_128_finalize
+mbin_interface aes_gcm_dec_128_update
+mbin_interface aes_gcm_dec_128_finalize
+mbin_interface aes_gcm_enc_256_update
+mbin_interface aes_gcm_enc_256_finalize
+mbin_interface aes_gcm_dec_256_update
+mbin_interface aes_gcm_dec_256_finalize
+
+mbin_interface aes_gcm_init_256
+mbin_interface aes_gcm_init_128
+mbin_interface aes_gcm_enc_128_nt
+mbin_interface aes_gcm_enc_128_update_nt
+mbin_interface aes_gcm_dec_128_nt
+mbin_interface aes_gcm_dec_128_update_nt
+mbin_interface aes_gcm_enc_256_nt
+mbin_interface aes_gcm_enc_256_update_nt
+mbin_interface aes_gcm_dec_256_nt
+mbin_interface aes_gcm_dec_256_update_nt
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_precomp.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_precomp.S
new file mode 100644
index 000000000..e555c9798
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_precomp.S
@@ -0,0 +1,83 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+/*
+void aes_gcm_precomp(struct gcm_key_data *key_data);
+*/
+ declare_var_generic_reg key_data ,0
+ declare_var_generic_reg temp0 ,1
+ declare_var_generic_reg hashkey_base,0
+ declare_var_generic_reg hashkey_addr,1
+
+ declare_var_vector_reg Low ,0
+ declare_var_vector_reg Middle0 ,1
+ declare_var_vector_reg Middle1 ,2
+ declare_var_vector_reg High ,3
+ declare_var_vector_reg HashKeyIter ,4
+ declare_var_vector_reg HashKey ,5
+ declare_var_vector_reg HashKeyExt ,6
+ declare_var_vector_reg Poly ,7
+ declare_var_vector_reg Zero ,31
+
+START_FUNC(precomp,KEY_LEN,_)
+ load_aes_keys key_data
+ mov temp0,0x87
+ eor vZero.16b,vZero.16b,vZero.16b
+ eor vHashKey.16b,vHashKey.16b,vHashKey.16b
+ dup vPoly.2d,temp0
+ aes_encrypt_block HashKey
+ add hashkey_addr,hashkey_base,(HASHKEY_TOTAL_NUM-1)*32
+ rbit vHashKey.16b,vHashKey.16b
+ ext vHashKeyExt.16b,vHashKey.16b,vHashKey.16b,8
+ mov vHashKeyIter.16b,vHashKey.16b
+ stp qHashKey,qHashKeyExt,[hashkey_addr],-32
+
+1:
+ pmull vMiddle0.1q,vHashKeyIter.1d,vHashKeyExt.1d
+ pmull2 vMiddle1.1q,vHashKeyIter.2d,vHashKeyExt.2d
+ pmull vLow.1q ,vHashKeyIter.1d,vHashKey.1d
+ eor vMiddle0.16b,vMiddle0.16b,vMiddle1.16b
+ pmull2 vHigh.1q ,vHashKeyIter.2d,vHashKey.2d
+ ext vMiddle1.16b,vMiddle0.16b,vZero.16b,8 //high
+ ext vMiddle0.16b,vZero.16b,vMiddle0.16b,8 //low
+ eor vHigh.16b ,vHigh.16b,vMiddle1.16b
+ eor vLow.16b ,vLow.16b ,vMiddle0.16b
+ pmull2 vMiddle0.1q ,vHigh.2d ,vPoly.2d
+ ext vMiddle1.16b,vMiddle0.16b,vZero.16b,8 //high
+ ext vMiddle0.16b,vZero.16b,vMiddle0.16b,8 //low
+ eor vHigh.16b ,vHigh.16b,vMiddle1.16b
+ eor vLow.16b ,vLow.16b ,vMiddle0.16b
+ pmull vMiddle0.1q ,vHigh.1d ,vPoly.1d
+ eor vHashKeyIter.16b,vLow.16b,vMiddle0.16b
+ ext vLow.16b,vHashKeyIter.16b,vHashKeyIter.16b,8
+ stp qHashKeyIter,qLow,[hashkey_addr],-32
+ cmp hashkey_addr,hashkey_base
+ bcs 1b
+
+ ret
+END_FUNC(precomp,KEY_LEN,_)
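The loop above computes successive powers of the hash key: each iteration GHASH-multiplies the
running value by H, reduces with the 0x87 polynomial, and stores the bit-reflected result next
to its half-swapped copy (the ext-by-8 value consumed by the pmull/pmull2 pairs), walking from
the last 32-byte slot down to hashkey_base. An illustrative C view of the resulting table; the
struct and field names below are not from this patch:

    /* Illustrative layout of the table filled above: HASHKEY_TOTAL_NUM slots of
     * 32 bytes, the highest-index slot written first. */
    struct hashkey_slot {
            uint8_t hashkey[16];      /* bit-reflected power of H                  */
            uint8_t hashkey_ext[16];  /* same value with its 64-bit halves swapped */
    };
    /* slot[HASHKEY_TOTAL_NUM - 1] holds H^1; each lower slot holds the next higher power. */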
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_update.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_update.S
new file mode 100644
index 000000000..d47c52212
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/gcm_update.S
@@ -0,0 +1,277 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+/*
+void gist_aes_gcm_dec_update_##mode( \
+ const struct gcm_key_data *key_data, \
+ struct gcm_context_data *context, \
+ uint8_t *out, \
+ const uint8_t *in, \
+ uint64_t len \
+ )
+ */
+
+ declare_var_generic_reg key_data ,0
+ declare_var_generic_reg context ,1
+ declare_var_generic_reg out ,2
+ declare_var_generic_reg in ,3
+ declare_var_generic_reg len ,4
+ declare_var_generic_reg partial_block_length,5
+ declare_var_generic_reg blocks ,5
+ declare_var_generic_reg hashkey_base,0
+ declare_var_generic_reg hashkey_addr,6
+ declare_var_generic_reg temp0 ,14
+ declare_var_generic_reg temp1 ,15
+ declare_var_generic_reg temp2 ,13
+
+
+
+ declare_var_vector_reg Ctr,0
+ declare_var_vector_reg AadHash,1
+ declare_var_vector_reg HashKey0,2
+ declare_var_vector_reg HashKey0Ext,3
+ declare_var_vector_reg High,4
+ declare_var_vector_reg Low,5
+ declare_var_vector_reg EncCtr,6
+ declare_var_vector_reg Middle,7
+
+ declare_var_vector_reg Tmp0,8
+ declare_var_vector_reg Tmp1,9
+ declare_var_vector_reg Zero,10
+ declare_var_vector_reg Poly,11
+ declare_var_vector_reg PartialBlock ,12
+ declare_var_vector_reg One,31
+ .set stack_size,48
+ .macro push_stack
+ stp d8, d9, [sp,-stack_size]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+
+ .endm
+
+ .macro pop_stack
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d8, d9, [sp], stack_size
+ .endm
+/*
+	20:exit_without_popstack
+	21:start_of_mainloop
+	22:exit_with_popstack
+	23:partial_block_start
+	24:partial_block_end
+ */
+START_FUNC(enc,KEY_LEN,_update_)
+START_FUNC(enc,KEY_LEN,_update_nt_)
+ ldr temp0,[context,IN_LENGTH_OFF] /*load in_length */
+ ldr partial_block_length,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ ldr qAadHash,[context]
+ cbz len,20f /** if(len==0)return; exit_without_popstack*/
+ push_stack
+ add temp0,temp0,len /* temp0=temp0+len */
+ load_aes_keys key_data
+ str temp0,[context,IN_LENGTH_OFF] /* save in_length */
+ /* Init Consts and IV */
+ ldr qCtr,[context,CTR_OFF]
+ mov wtemp1,1
+ eor vOne.16b,vOne.16b,vOne.16b
+ mov temp0,0x87
+ eor vZero.16b,vZero.16b,vZero.16b
+ ins vOne.s[3],wtemp1
+ dup vPoly.2d,temp0
+ cbnz partial_block_length,23f /* if(partial_block_length!=0) not normal case*/
+21: /* start_of_mainloop */
+ cbz len,24f
+ lsr blocks,len,4
+ cmp blocks,HASHKEY_TOTAL_NUM - 1
+ and len,len,0xf
+ /* loop aes gcm enc/dec loop */
+ bls 2f /* skip loop */
+1:
+ sub blocks,blocks,HASHKEY_TOTAL_NUM
+ cmp blocks,HASHKEY_TOTAL_NUM - 1
+ aes_gcm_n_round encrypt,HASHKEY_TOTAL_NUM,AadHash,in,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Poly, \
+ Ctr,EncCtr,One,out,Tmp0,Tmp1
+ bhi 1b /* back to loop start */
+2:
+ cbz blocks,4f // left blocks == 0
+ /* -(blocks - HASHKEY_TOTAL_NUM) */
+ sub temp0,blocks,HASHKEY_TOTAL_NUM
+ neg temp0,temp0
+ sub blocks,blocks,1
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ aes_gcm_init encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ cbz blocks,3f /* origin_blocks == 1 */
+ sub blocks,blocks,1
+
+ cbz blocks,2f /* origin_blocks == 2 */
+1:
+ sub blocks,blocks,1
+ aes_gcm_middle encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ cbnz blocks,1b
+2:
+ aes_gcm_middle encrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,1 /* not load next hash */
+3:
+ poly_mult_final_x2 AadHash,High,Low,Tmp0,Tmp1,Poly
+4:
+ str qAadHash,[context]
+ str qCtr,[context,CTR_OFF]
+ cbnz len,24f
+22: /* exit_with_popstack */
+ pop_stack
+20: /* exit_without_popstack */
+ ret
+23: /* partial_block_start */
+
+ generic_partial_block_start encrypt,len,in,out,context, \
+ temp2,partial_block_length,temp0,temp1,hashkey_addr
+ cbnz partial_block_length,22b
+ ldr qHashKey0Ext,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32+16]
+ ldr qHashKey0 ,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32]
+ ldr qPartialBlock,[context,PARTIAL_BLOCK_ENC_KEY_OFF]
+ ghash_block_reg AadHash,PartialBlock,HashKey0,HashKey0Ext, \
+ High,Low,Middle,Zero,Poly,Tmp0
+ str qAadHash,[context]
+ cbz len,4b
+ cmp len,15
+ bhi 21b
+24: /*partial_block_end */
+ add vCtr.4s,vCtr.4s,vOne.4s
+ read_small_data_start PartialBlock,in,len,temp0,Tmp0
+ rev32 vEncCtr.16b,vCtr.16b
+ str qCtr,[context,CTR_OFF]
+ aes_encrypt_block EncCtr
+ eor vPartialBlock.16b,vPartialBlock.16b,vEncCtr.16b
+ str qPartialBlock,[context,PARTIAL_BLOCK_ENC_KEY_OFF]
+ write_small_data_start PartialBlock,out,len,temp0,Tmp0
+ str len,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ pop_stack
+ ret
+
+END_FUNC(enc,KEY_LEN,_update_)
+END_FUNC(enc,KEY_LEN,_update_nt_)
+
+
+START_FUNC(dec,KEY_LEN,_update_)
+START_FUNC(dec,KEY_LEN,_update_nt_)
+ ldr temp0,[context,IN_LENGTH_OFF] /*load in_length */
+ ldr partial_block_length,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ ldr qAadHash,[context]
+ cbz len,20f /** if(len==0)return; exit_without_popstack*/
+ push_stack
+ add temp0,temp0,len /* temp0=temp0+len */
+ load_aes_keys key_data
+ str temp0,[context,IN_LENGTH_OFF] /* save in_length */
+ /* Init Consts and IV */
+ ldr qCtr,[context,CTR_OFF]
+ mov wtemp1,1
+ eor vOne.16b,vOne.16b,vOne.16b
+ mov temp0,0x87
+ eor vZero.16b,vZero.16b,vZero.16b
+ ins vOne.s[3],wtemp1
+ dup vPoly.2d,temp0
+ cbnz partial_block_length,23f /* if(partial_block_length!=0) not normal case*/
+21: /* start_of_mainloop */
+ cbz len,24f
+ lsr blocks,len,4
+ cmp blocks,HASHKEY_TOTAL_NUM - 1
+ and len,len,0xf
+	/* loop aes gcm enc/dec loop */
+ bls 2f /* skip loop */
+1:
+ sub blocks,blocks,HASHKEY_TOTAL_NUM
+ cmp blocks,HASHKEY_TOTAL_NUM - 1
+ aes_gcm_n_round decrypt,HASHKEY_TOTAL_NUM,AadHash,in,hashkey_addr,hashkey_base, \
+ HashKey0,HashKey0Ext,High,Low,Poly, \
+ Ctr,EncCtr,One,out,Tmp0,Tmp1
+ bhi 1b /* back to loop start */
+2:
+ cbz blocks,4f /* left blocks == 0 */
+ /* -(blocks - HASHKEY_TOTAL_NUM) */
+ sub temp0,blocks,HASHKEY_TOTAL_NUM
+ neg temp0,temp0
+ sub blocks,blocks,1
+ add hashkey_addr,hashkey_base,temp0,lsl 5
+
+ aes_gcm_init decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 // load next hash
+ cbz blocks,3f /* origin_blocks == 1 */
+ sub blocks,blocks,1
+
+ cbz blocks,2f /* origin_blocks == 2 */
+1:
+ sub blocks,blocks,1
+ aes_gcm_middle decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,2 /* load next hash */
+ cbnz blocks,1b
+2:
+ aes_gcm_middle decrypt,AadHash,in,hashkey_addr,HashKey0,HashKey0Ext, \
+ High,Low,Ctr,EncCtr,One,out,Tmp0,Tmp1,1 /* not load next hash */
+3:
+ poly_mult_final_x2 AadHash,High,Low,Tmp0,Tmp1,Poly
+4:
+ str qAadHash,[context]
+ str qCtr,[context,CTR_OFF]
+ cbnz len,24f
+22: /* exit_with_popstack */
+ pop_stack
+20: /* exit_without_popstack */
+ ret
+23: /* partial_block_start */
+
+ generic_partial_block_start decrypt,len,in,out,context, \
+ temp2,partial_block_length,temp0,temp1,hashkey_addr
+ cbnz partial_block_length,22b
+ ldr qHashKey0Ext,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32+16]
+ ldr qHashKey0 ,[hashkey_base,(HASHKEY_TOTAL_NUM-1)*32]
+ ldr qPartialBlock,[context,PARTIAL_BLOCK_ENC_KEY_OFF]
+ ghash_block_reg AadHash,PartialBlock,HashKey0,HashKey0Ext, \
+ High,Low,Middle,Zero,Poly,Tmp0
+ str qAadHash,[context]
+ cbz len,4b
+ cmp len,15
+ bhi 21b
+24: /* partial_block_end */
+ add vCtr.4s,vCtr.4s,vOne.4s
+ read_small_data_start PartialBlock,in,len,temp0,Tmp0
+ rev32 vEncCtr.16b,vCtr.16b
+ str qCtr,[context,CTR_OFF]
+ aes_encrypt_block EncCtr
+ eor vEncCtr.16b,vPartialBlock.16b,vEncCtr.16b
+ tbx_small_data_start EncCtr,PartialBlock,len,temp0,Tmp0
+ write_small_data_start EncCtr,out,len,temp0,Tmp0
+ str qPartialBlock,[context,PARTIAL_BLOCK_ENC_KEY_OFF]
+ str len,[context,PARTIAL_BLOCK_LENGTH_OFF]
+ pop_stack
+ ret
+END_FUNC(dec,KEY_LEN,_update_)
+END_FUNC(dec,KEY_LEN,_update_nt_)
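The _update_ entry points keep all running state (current AadHash, counter block, partial-block
bytes and running lengths) in gcm_context_data, so a message can be processed in arbitrary-sized
chunks. A hedged streaming sketch using the 128-bit encrypt names registered in
gcm_multibinary_aarch64.S; the init and finalize prototypes are taken from the isa-l_crypto
public API and are assumptions here:

    #include <stdint.h>
    #include <aes_gcm.h>   /* assumed isa-l_crypto public header */

    /* Sketch: encrypt two chunks, then emit the tag (prototypes assumed). */
    static void gcm128_seal_chunked(struct gcm_key_data *key_data, uint8_t iv[12],
                                    const uint8_t *aad, uint64_t aad_len,
                                    const uint8_t *in, uint64_t n0, uint64_t n1,
                                    uint8_t *out, uint8_t tag[16])
    {
            struct gcm_context_data ctx;

            aes_gcm_init_128(key_data, &ctx, iv, aad, aad_len);
            aes_gcm_enc_128_update(key_data, &ctx, out, in, n0);
            aes_gcm_enc_128_update(key_data, &ctx, out + n0, in + n0, n1);
            aes_gcm_enc_128_finalize(key_data, &ctx, tag, 16);
    }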
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_128_aarch64_aes.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_128_aarch64_aes.S
new file mode 100644
index 000000000..4a3e990c3
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_128_aarch64_aes.S
@@ -0,0 +1,134 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+.arch armv8-a+crypto
+
+ .text
+/*
+Macros
+*/
+#define NUM_ROUNDS(a) (7+(a)/32)
+.macro declare_var_vector_reg name:req,reg:req
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+.endm
+.macro round_128 off:req,rcon:req
+ .if \off == 0
+ ldp w_tmp2,w_tmp3,[key,8]
+ ldp w_tmp0,w_tmp1,[key]
+ movi vzero.4s,0
+ dup vsrc.4s,w_tmp3
+ stp w_tmp2,w_tmp3,[exp_key_enc,8]
+ stp w_tmp0,w_tmp1,[exp_key_enc]
+ .endif
+ mov w0,\rcon
+ mov vdest.16b,vzero.16b
+ aese vdest.16b,vsrc.16b
+ mov w_tmp4,vdest.s[0]
+ eor w_tmp0,w_tmp0,w0
+ eor w_tmp0,w_tmp0,w_tmp4,ror 8
+ eor w_tmp1,w_tmp0,w_tmp1
+ eor w_tmp2,w_tmp1,w_tmp2
+ eor w_tmp3,w_tmp2,w_tmp3
+ stp w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*\off+KEY_LEN]
+ stp w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*\off+8+KEY_LEN]
+ .if \off != 10
+ dup vsrc.4s,w_tmp3
+ .endif
+.endm
+.macro export_dec_key rounds:req,enc_key:req,dec_key:req
+ ldr q0,[\enc_key]
+ ldr q1,[\enc_key,(\rounds-1)*16]
+ str q0,[\dec_key,(\rounds-1)*16]
+ str q1,[\dec_key]
+ ldp q0,q1,[\enc_key,1*16]
+ ldp q2,q3,[\enc_key,(1+2)*16]
+ ldp q4,q5,[\enc_key,(1+4)*16]
+ aesimc v0.16b,v0.16b
+ aesimc v1.16b,v1.16b
+ ldp q6,q7,[\enc_key,(1+6)*16]
+ aesimc v2.16b,v2.16b
+ aesimc v3.16b,v3.16b
+ stp q1,q0,[\dec_key,(\rounds-1-2)*16]
+ aesimc v4.16b,v4.16b
+ aesimc v5.16b,v5.16b
+ stp q3,q2,[\dec_key,(\rounds-1-4)*16]
+ ldr q0,[\enc_key,(1+8)*16]
+ aesimc v6.16b,v6.16b
+ aesimc v7.16b,v7.16b
+ stp q5,q4,[\dec_key,(\rounds-1-6)*16]
+ aesimc v0.16b,v0.16b
+ stp q7,q6,[\dec_key,(\rounds-1-8)*16]
+ str q0,[\dec_key,(\rounds-1-9)*16]
+.endm
+/**
+ void aes_keyexp_128_aes(const uint8_t * key,
+ uint8_t * exp_key_enc, uint8_t * exp_key_dec)
+*/
+ key .req x0
+ exp_key_enc .req x1
+ exp_key_dec .req x2
+ .equ KEY_LEN, (128/8)
+ w_tmp0 .req w3
+ w_tmp1 .req w4
+ w_tmp2 .req w5
+ w_tmp3 .req w6
+ w_tmp4 .req w7
+ declare_var_vector_reg dest,0
+ declare_var_vector_reg zero,1
+ declare_var_vector_reg src, 2
+
+
+ .global aes_keyexp_128_aes
+ .type aes_keyexp_128_aes, %function
+
+aes_keyexp_128_aes:
+ .set rcon,1
+ .set off,0
+ .rept 10
+ round_128 off,rcon
+ .set off,off+1
+ .set rcon,(rcon << 1) ^ ((rcon >> 7) * 0x11b)
+ .endr
+
+ export_dec_key NUM_ROUNDS(128),exp_key_enc,exp_key_dec
+ ret
+ .size aes_keyexp_128_aes, .-aes_keyexp_128_aes
+ .global aes_keyexp_128_enc_aes
+ .type aes_keyexp_128_enc_aes, %function
+aes_keyexp_128_enc_aes:
+ .set rcon,1
+ .set off,0
+ .rept 10
+ round_128 off,rcon
+ .set off,off+1
+ .set rcon,(rcon << 1) ^ ((rcon >> 7) * 0x11b)
+ .endr
+ ret
+ .size aes_keyexp_128_enc_aes, .-aes_keyexp_128_enc_aes \ No newline at end of file
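aes_keyexp_128_aes expands the 16-byte key into NUM_ROUNDS(128) = 7 + 128/32 = 11 round keys for
each direction; the decrypt schedule is the aesimc-transformed encrypt schedule stored in reverse
order (see export_dec_key above). A small caller sketch relying only on the prototype shown in
the comment above:

    #include <stdint.h>

    /* Prototype as documented above. */
    void aes_keyexp_128_aes(const uint8_t *key, uint8_t *exp_key_enc, uint8_t *exp_key_dec);

    void expand_aes128(const uint8_t key[16])
    {
            uint8_t enc_keys[11 * 16];   /* NUM_ROUNDS(128) = 11 round keys of 16 bytes */
            uint8_t dec_keys[11 * 16];

            aes_keyexp_128_aes(key, enc_keys, dec_keys);
    }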
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_192_aarch64_aes.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_192_aarch64_aes.S
new file mode 100644
index 000000000..2ba46060c
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_192_aarch64_aes.S
@@ -0,0 +1,136 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+/*
+Macros
+*/
+#define NUM_ROUNDS(a) (7+(a)/32)
+.macro declare_var_vector_reg name:req,reg:req
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+.endm
+.macro round_192 off:req,rcon:req
+ .if \off == 0
+ ldp w_tmp0,w_tmp1,[key]
+ ldp w_tmp2,w_tmp3,[key,8]
+ ldp w_tmp4,w_tmp5,[key,16]
+ movi vzero.4s,0
+ dup vsrc.4s,w_tmp5
+ stp w_tmp0,w_tmp1,[exp_key_enc]
+ stp w_tmp4,w_tmp5,[exp_key_enc,16]
+ stp w_tmp2,w_tmp3,[exp_key_enc,8]
+ .endif
+ mov w0,\rcon
+ mov vdest.16b,vzero.16b
+ aese vdest.16b,vsrc.16b
+ mov w_tmp,vdest.s[0]
+ eor w_tmp0,w_tmp0,w0
+ eor w_tmp0,w_tmp0,w_tmp,ror 8
+ eor w_tmp1,w_tmp0,w_tmp1
+ eor w_tmp2,w_tmp1,w_tmp2
+ eor w_tmp3,w_tmp2,w_tmp3
+ .if \off < 7
+ eor w_tmp4,w_tmp4,w_tmp3
+ eor w_tmp5,w_tmp5,w_tmp4
+ dup vsrc.4s,w_tmp5
+ stp w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
+ stp w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
+ stp w_tmp4,w_tmp5,[exp_key_enc,KEY_LEN*(\off+1)+16]
+ .else
+ stp w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
+ stp w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
+ .endif
+.endm
+
+.macro export_dec_key rounds:req,enc_key:req,dec_key:req
+ ldr q0,[\enc_key]
+ ldr q1,[\enc_key,(\rounds-1)*16]
+ str q0,[\dec_key,(\rounds-1)*16]
+ str q1,[\dec_key]
+ ldp q0,q1,[\enc_key,1*16]
+ ldp q2,q3,[\enc_key,(1+2)*16]
+ ldp q4,q5,[\enc_key,(1+4)*16]
+ aesimc v0.16b,v0.16b
+ aesimc v1.16b,v1.16b
+ ldp q6,q7,[\enc_key,(1+6)*16]
+ aesimc v2.16b,v2.16b
+ aesimc v3.16b,v3.16b
+ stp q1,q0,[\dec_key,(\rounds-1-2)*16]
+ ldp q0,q1,[\enc_key,(1+8)*16]
+ aesimc v4.16b,v4.16b
+ aesimc v5.16b,v5.16b
+ stp q3,q2,[\dec_key,(\rounds-1-4)*16]
+ aesimc v6.16b,v6.16b
+ aesimc v7.16b,v7.16b
+ stp q5,q4,[\dec_key,(\rounds-1-6)*16]
+ ldr q2,[\enc_key,(1+10)*16]
+ aesimc v0.16b,v0.16b
+ aesimc v1.16b,v1.16b
+ stp q7,q6,[\dec_key,(\rounds-1-8)*16]
+ aesimc v2.16b,v2.16b
+ stp q1,q0,[\dec_key,(\rounds-1-10)*16]
+ str q2,[\dec_key,(\rounds-1-11)*16]
+.endm
+/**
+ void aes_keyexp_192_aes(const uint8_t * key,
+ uint8_t * exp_key_enc, uint8_t * exp_key_dec)
+*/
+ key .req x0
+ exp_key_enc .req x1
+ exp_key_dec .req x2
+ .equ KEY_LEN, (192/8)
+ w_tmp0 .req w3
+ w_tmp1 .req w4
+ w_tmp2 .req w5
+ w_tmp3 .req w6
+ w_tmp .req w7
+ w_tmp4 .req w9
+ w_tmp5 .req w10
+ declare_var_vector_reg dest,0
+ declare_var_vector_reg zero,1
+ declare_var_vector_reg src, 2
+
+
+ .global aes_keyexp_192_aes
+ .type aes_keyexp_192_aes, %function
+
+aes_keyexp_192_aes:
+ .set rcon,1
+ .set off,0
+ .rept 8
+ round_192 off,rcon
+ .set off,off+1
+ .set rcon,(rcon << 1) ^ ((rcon >> 7) * 0x11b)
+ .endr
+ export_dec_key NUM_ROUNDS(192),exp_key_enc,exp_key_dec
+ ret
+ .size aes_keyexp_192_aes, .-aes_keyexp_192_aes
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_256_aarch64_aes.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_256_aarch64_aes.S
new file mode 100644
index 000000000..5433b2ff6
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_256_aarch64_aes.S
@@ -0,0 +1,153 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+
+ .text
+/*
+Macros
+*/
+#define NUM_ROUNDS(a) (7+(a)/32)
+.macro declare_var_vector_reg name:req,reg:req
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+.endm
+.macro round_256 off:req,rcon:req,export_dec_key
+ .if \off == 0
+ ldp w_tmp6,w_tmp7,[key,24]
+ ldp w_tmp0,w_tmp1,[key]
+ ldp w_tmp2,w_tmp3,[key,8]
+ ldp w_tmp4,w_tmp5,[key,16]
+ movi vzero.4s,0
+ dup vsrc.4s,w_tmp7
+ stp w_tmp6,w_tmp7,[exp_key_enc,24]
+ stp w_tmp0,w_tmp1,[exp_key_enc]
+ stp w_tmp4,w_tmp5,[exp_key_enc,16]
+ stp w_tmp2,w_tmp3,[exp_key_enc,8]
+ .endif
+ mov w0,\rcon
+ mov vdest.16b,vzero.16b
+ aese vdest.16b,vsrc.16b
+ mov w_tmp,vdest.s[0]
+ eor w_tmp0,w_tmp0,w0
+ eor w_tmp0,w_tmp0,w_tmp,ror 8
+ eor w_tmp1,w_tmp0,w_tmp1
+ eor w_tmp2,w_tmp1,w_tmp2
+ eor w_tmp3,w_tmp2,w_tmp3
+ .if \off < 6
+ dup vsrc.4s,w_tmp3
+ mov vdest.16b,vzero.16b
+ aese vdest.16b,vsrc.16b
+ mov w_tmp,vdest.s[0]
+ eor w_tmp4,w_tmp4,w_tmp
+ eor w_tmp5,w_tmp5,w_tmp4
+ eor w_tmp6,w_tmp6,w_tmp5
+ eor w_tmp7,w_tmp7,w_tmp6
+ dup vsrc.4s,w_tmp7
+ stp w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
+ stp w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
+ stp w_tmp4,w_tmp5,[exp_key_enc,KEY_LEN*(\off+1)+16]
+ stp w_tmp6,w_tmp7,[exp_key_enc,KEY_LEN*(\off+1)+24]
+ .else
+ stp w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
+ stp w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
+ .endif
+.endm
+
+.macro export_dec_key rounds:req,enc_key:req,dec_key:req
+ ldr q0,[\enc_key]
+ ldr q1,[\enc_key,(\rounds-1)*16]
+ str q0,[\dec_key,(\rounds-1)*16]
+ str q1,[\dec_key]
+ ldp q0,q1,[\enc_key,1*16]
+ ldp q2,q3,[\enc_key,(1+2)*16]
+ ldp q4,q5,[\enc_key,(1+4)*16]
+ aesimc v0.16b,v0.16b
+ aesimc v1.16b,v1.16b
+ ldp q6,q7,[\enc_key,(1+6)*16]
+ aesimc v2.16b,v2.16b
+ aesimc v3.16b,v3.16b
+ stp q1,q0,[\dec_key,(\rounds-1-2)*16]
+ ldp q0,q1,[\enc_key,(1+8)*16]
+ aesimc v4.16b,v4.16b
+ aesimc v5.16b,v5.16b
+ stp q3,q2,[\dec_key,(\rounds-1-4)*16]
+ ldp q2,q3,[\enc_key,(1+10)*16]
+
+ aesimc v6.16b,v6.16b
+ aesimc v7.16b,v7.16b
+ stp q5,q4,[\dec_key,(\rounds-1-6)*16]
+ ldr q4,[\enc_key,(1+12)*16]
+ aesimc v0.16b,v0.16b
+ aesimc v1.16b,v1.16b
+ stp q7,q6,[\dec_key,(\rounds-1-8)*16]
+ aesimc v2.16b,v2.16b
+ aesimc v3.16b,v3.16b
+ stp q1,q0,[\dec_key,(\rounds-1-10)*16]
+ aesimc v4.16b,v4.16b
+ stp q3,q2,[\dec_key,(\rounds-1-12)*16]
+ str q4,[\dec_key,(\rounds-1-13)*16]
+.endm
+/**
+ void aes_keyexp_256_aes(const uint8_t * key,
+ uint8_t * exp_key_enc, uint8_t * exp_key_dec)
+*/
+ key .req x0
+ exp_key_enc .req x1
+ exp_key_dec .req x2
+ .equ KEY_LEN, (256/8)
+ w_tmp0 .req w3
+ w_tmp1 .req w4
+ w_tmp2 .req w5
+ w_tmp3 .req w6
+ w_tmp .req w7
+ w_tmp4 .req w9
+ w_tmp5 .req w10
+ w_tmp6 .req w11
+ w_tmp7 .req w12
+ declare_var_vector_reg dest,0
+ declare_var_vector_reg zero,1
+ declare_var_vector_reg src, 2
+
+
+ .global aes_keyexp_256_aes
+ .type aes_keyexp_256_aes, %function
+
+aes_keyexp_256_aes:
+ .set rcon,1
+ .set off,0
+ .rept 7
+ round_256 off,rcon,1
+ .set off,off+1
+ .set rcon,(rcon << 1) ^ ((rcon >> 7) * 0x11b)
+ .endr
+ export_dec_key NUM_ROUNDS(256),exp_key_enc,exp_key_dec
+ ret
+ .size aes_keyexp_256_aes, .-aes_keyexp_256_aes
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_aarch64_dispatcher.c b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_aarch64_dispatcher.c
new file mode 100644
index 000000000..14c9889ac
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_aarch64_dispatcher.c
@@ -0,0 +1,72 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include <aarch64_multibinary.h>
+
+#undef PROVIDER_BASIC
+#define PROVIDER_BASIC(a) (void*)0
+
+DEFINE_INTERFACE_DISPATCHER(aes_keyexp_128)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ if ((auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES))
+ return PROVIDER_INFO(aes_keyexp_128_aes);
+
+ return PROVIDER_BASIC(aes_keyexp_128);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_keyexp_128_enc)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ if ((auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES))
+ return PROVIDER_INFO(aes_keyexp_128_enc_aes);
+
+ return PROVIDER_BASIC(aes_keyexp_128_enc);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_keyexp_192)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ if ((auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES))
+ return PROVIDER_INFO(aes_keyexp_192_aes);
+
+ return PROVIDER_BASIC(aes_keyexp_192);
+
+}
+
+DEFINE_INTERFACE_DISPATCHER(aes_keyexp_256)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ if ((auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES))
+ return PROVIDER_INFO(aes_keyexp_256_aes);
+
+ return PROVIDER_BASIC(aes_keyexp_256);
+
+}
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_multibinary_aarch64.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_multibinary_aarch64.S
new file mode 100644
index 000000000..aa7c32576
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/keyexp_multibinary_aarch64.S
@@ -0,0 +1,35 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "aarch64_multibinary.h"
+
+mbin_interface aes_keyexp_128
+mbin_interface aes_keyexp_128_enc
+mbin_interface aes_keyexp_192
+mbin_interface aes_keyexp_256
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aarch64_dispatcher.c b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aarch64_dispatcher.c
new file mode 100644
index 000000000..6c918858e
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aarch64_dispatcher.c
@@ -0,0 +1,102 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+#include <aarch64_multibinary.h>
+
+#undef PROVIDER_BASIC
+#define PROVIDER_BASIC(a) (void*)0
+
+static unsigned long is_crypto_available(void)
+{
+ unsigned long auxval = getauxval(AT_HWCAP);
+ return (auxval & (HWCAP_ASIMD | HWCAP_AES)) == (HWCAP_ASIMD | HWCAP_AES);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_128_enc)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_128_enc_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_128_enc);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_128_dec)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_128_dec_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_128_dec);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_128_enc_expanded_key)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_128_enc_expanded_key_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_128_enc_expanded_key);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_128_dec_expanded_key)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_128_dec_expanded_key_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_128_dec_expanded_key);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_256_enc)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_256_enc_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_256_enc);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_256_dec)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_256_dec_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_256_dec);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_256_enc_expanded_key)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_256_enc_expanded_key_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_256_enc_expanded_key);
+}
+
+DEFINE_INTERFACE_DISPATCHER(XTS_AES_256_dec_expanded_key)
+{
+ if (is_crypto_available()) {
+ return PROVIDER_INFO(XTS_AES_256_dec_expanded_key_ce);
+ }
+ return PROVIDER_BASIC(XTS_AES_256_dec_expanded_key);
+}
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_common.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_common.S
new file mode 100644
index 000000000..318c1e8a4
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_common.S
@@ -0,0 +1,214 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+.altmacro
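+/* derive AES-128 round key \next from round key \prev:
+ * SubWord is performed with an aese against an all-zero block (the dup of
+ * the last word makes ShiftRows harmless), RotWord is the "ror 8", and the
+ * rcon held in \ctx is xor-ed in; the result is chained into the previous
+ * key via the usual 4/8/12-byte shifted xors.  \ctx is doubled each round
+ * and set to 0x1b for round key 9 (the GF(2^8) reduction of 0x80 doubled).
+ */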
+.macro aes_key_expand_next next:req,prev:req,ctx:req
+ .if \next == 9
+ mov \ctx, 0x1b
+ .endif
+ dup vdest.4s,vKey\prev\().s[3]
+ ext vtmp.16b,vzero.16b,vKey\prev\().16b,#12
+ aese vdest.16b,vzero.16b
+ eor vKey\next\().16b,vKey\prev\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ eor vKey\next\().16b,vKey\next\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ mov tmpw,vdest.s[0]
+ eor tmpw,\ctx,tmpw,ror 8
+ dup vdest.4s,tmpw
+ eor vKey\next\().16b,vKey\next\().16b,vtmp.16b
+ mov \ctx,ctx,lsl 1
+ eor vKey\next\().16b,vKey\next\().16b,vdest.16b
+.endm
+
+/* when loading key = 0
+ * arg1 = input key
+ * arg2 = rcon ctx register (optional)
+ * when loading key > 0
+ * arg1 = rcon ctx register (optional)
+ */
+.macro aes_key_expand key:req,arg1,arg2
+ .if \key == 0
+ ld1 {vKey\key\().4s},[\arg1]
+ movi vzero.4s, 0
+ .ifb \arg2
+ mov rcon,#0x01
+ .endif
+
+ .ifnb \arg2
+ mov \arg2,#0x01
+ .endif
+ .endif
+
+ .if \key > 0
+ prev=\key-1
+ .ifb \arg1
+ aes_key_expand_next \key,%prev,rcon
+ .endif
+
+ .ifnb \arg1
+ aes_key_expand_next \key,%prev,\arg1
+ .endif
+ .endif
+.endm
+
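+/* one AES round on a single block: \mode 0 selects encryption (aese/aesmc),
+ * any other value decryption (aesd/aesimc); round 9 omits the MixColumns
+ * half and "key 10" is just the final AddRoundKey (a plain eor).
+ */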
+.macro aes_round block:req,key:req,mode:req
+ .if \key < 9
+ .if mode == 0
+ aese \block\().16b,vKey\key\().16b
+ aesmc \block\().16b,\block\().16b
+ .else
+ aesd \block\().16b,vKey\key\().16b
+ aesimc \block\().16b,\block\().16b
+ .endif
+ .endif
+ .if \key == 9
+ .if mode == 0
+ aese \block\().16b,vKey\key\().16b
+ .else
+ aesd \block\().16b,vKey\key\().16b
+ .endif
+ .endif
+ .if \key == 10
+ eor \block\().16b,\block\().16b,vKey\key\().16b
+ .endif
+.endm
+
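+/* the same round applied to four blocks back to back, so the independent
+ * aese/aesmc (or aesd/aesimc) pairs can overlap in the pipeline; the last
+ * round also folds in the final AddRoundKey (\last_key).
+ */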
+.macro aes_round_interleave b0:req,b1:req,b2:req,b3:req,key:req,mode:req,last_key
+ .if \key < 9
+ .if \mode == 0
+ aese \b0\().16b,vKey\key\().16b
+ aesmc \b0\().16b,\b0\().16b
+ aese \b1\().16b,vKey\key\().16b
+ aesmc \b1\().16b,\b1\().16b
+ aese \b2\().16b,vKey\key\().16b
+ aesmc \b2\().16b,\b2\().16b
+ aese \b3\().16b,vKey\key\().16b
+ aesmc \b3\().16b,\b3\().16b
+ .else
+ aesd \b0\().16b,vKey\key\().16b
+ aesimc \b0\().16b,\b0\().16b
+ aesd \b1\().16b,vKey\key\().16b
+ aesimc \b1\().16b,\b1\().16b
+ aesd \b2\().16b,vKey\key\().16b
+ aesimc \b2\().16b,\b2\().16b
+ aesd \b3\().16b,vKey\key\().16b
+ aesimc \b3\().16b,\b3\().16b
+ .endif
+ .endif
+
+ .if \key == 9
+ .if \mode == 0
+ aese \b0\().16b,vKey\key\().16b
+ eor \b0\().16b,\b0\().16b,vKey\last_key\().16b
+ aese \b1\().16b,vKey\key\().16b
+ eor \b1\().16b,\b1\().16b,vKey\last_key\().16b
+ aese \b2\().16b,vKey\key\().16b
+ eor \b2\().16b,\b2\().16b,vKey\last_key\().16b
+ aese \b3\().16b,vKey\key\().16b
+ eor \b3\().16b,\b3\().16b,vKey\last_key\().16b
+ .else
+ aesd \b0\().16b,vKey\key\().16b
+ eor \b0\().16b,\b0\().16b,vKey\last_key\().16b
+ aesd \b1\().16b,vKey\key\().16b
+ eor \b1\().16b,\b1\().16b,vKey\last_key\().16b
+ aesd \b2\().16b,vKey\key\().16b
+ eor \b2\().16b,\b2\().16b,vKey\last_key\().16b
+ aesd \b3\().16b,vKey\key\().16b
+ eor \b3\().16b,\b3\().16b,vKey\last_key\().16b
+ .endif
+ .endif
+.endm
+
+.macro aes_rounds_interleave b0:req,b1:req,b2:req,b3:req,mode
+ aes_round_interleave \b0,\b1,\b2,\b3,0,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,1,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,2,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,3,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,4,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,5,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,6,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,7,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,8,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,9,\mode,10
+.endm
+
+.macro aes_rounds blk:req,mode:req
+ aes_round \blk,0,\mode
+ aes_round \blk,1,\mode
+ aes_round \blk,2,\mode
+ aes_round \blk,3,\mode
+ aes_round \blk,4,\mode
+ aes_round \blk,5,\mode
+ aes_round \blk,6,\mode
+ aes_round \blk,7,\mode
+ aes_round \blk,8,\mode
+ aes_round \blk,9,\mode
+ aes_round \blk,10,\mode
+.endm
+
+/* load k1/k2 from memory and encrypt the tweak with k2
+ * both keys share the same set of registers
+ * but never overlap (k2 is used only once and discarded)
+ */
+.macro keyload_and_encrypt_tweak iv:req,k2:req,k1:req
+ ldp qKey0,qKey1,[\k2],#32
+ aes_enc_round \iv,0
+ ldp qKey2,qKey3,[\k2],#32
+ aes_enc_round \iv,1
+ ldp qKey0,qKey1,[\k1],#32
+ aes_enc_round \iv,2
+ ldp qKey4,qKey5,[\k2],#32
+ aes_enc_round \iv,3
+ ldp qKey2,qKey3,[\k1],#32
+ aes_enc_round \iv,4
+ ldp qKey6,qKey7,[\k2],#32
+ aes_enc_round \iv,5
+ ldp qKey4,qKey5,[\k1],#32
+ aes_enc_round \iv,6
+ ldp qKey8,qKey9,[k2],#32
+ aes_enc_round \iv,7
+ ldp qKey6,qKey7,[\k1],#32
+ aes_enc_round \iv,8
+ ld1 {vKey10.16b},[\k2],#16
+ aes_enc_round \iv,9
+ ldp qKey8,qKey9,[\k1],#32
+ aes_enc_round \iv,10
+ ld1 {vKey10.16b},[\k1],#16
+.endm
+
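+/* spill the callee-saved d8/d9 pair and reserve a 16-byte scratch buffer
+ * (tmpbuf) on the stack for the ciphertext-stealing tail.
+ */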
+.macro save_stack
+ stp d8,d9,[sp, -32]!
+ add tmpbuf,sp,16
+.endm
+
+.macro restore_stack
+ ldp d8,d9,[sp],32
+.endm
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_dec.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_dec.S
new file mode 100644
index 000000000..ceae2d3c0
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_dec.S
@@ -0,0 +1,116 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_128_common.S"
+#include "xts_aes_common.S"
+
+.macro vswap vec1:req,vec2:req
+ mov vtmp.16b,\vec1\().16b
+ mov \vec1\().16b,\vec2\().16b
+ mov \vec2\().16b,vtmp.16b
+.endm
+
+/* encrypt the tweak with the tweak key (k2) and, at the same time,
+ * expand the encryption key (k1);
+ * even though the two key sets share the same set of registers,
+ * they never overlap at any given time (k2 is used only once and discarded)
+ */
+.macro keyexp_and_encrypt_tweak iv:req,k2:req,k1:req
+ aes_key_expand 0,\k2
+ aes_enc_round \iv,0
+ aes_key_expand 1
+ aes_enc_round \iv,1
+ aes_key_expand 0,\k1,rcon2
+ aes_key_expand 2
+ aes_enc_round \iv,2
+ aes_key_expand 1,rcon2
+ aes_key_expand 3
+ aes_enc_round \iv,3
+ aes_key_expand 2,rcon2
+ aes_key_expand 4
+ aes_enc_round \iv,4
+ aes_key_expand 3,rcon2
+ aes_key_expand 5
+ aes_enc_round \iv,5
+ aes_key_expand 4,rcon2
+ aes_key_expand 6
+ aes_enc_round \iv,6
+ aes_key_expand 5,rcon2
+ aes_key_expand 7
+ aes_enc_round \iv,7
+ aes_key_expand 6,rcon2
+ aes_key_expand 8
+ aes_enc_round \iv,8
+ aes_key_expand 7,rcon2
+ aes_key_expand 9
+ aes_enc_round \iv,9
+ aes_key_expand 8,rcon2
+ aes_key_expand 10
+ aes_enc_round \iv,10
+ aes_key_expand 9,rcon2
+ aes_key_expand 10,rcon2
+
+ // transform the encryption key into the decryption key
+ aesimc vKey1.16b,vKey1.16b
+ vswap vKey0,vKey10
+ aesimc vKey9.16b,vKey9.16b
+
+ aesimc vKey2.16b,vKey2.16b
+ aesimc vKey8.16b,vKey8.16b
+ vswap vKey1,vKey9
+
+ aesimc vKey3.16b,vKey3.16b
+ aesimc vKey7.16b,vKey7.16b
+ vswap vKey2,vKey8
+
+ aesimc vKey4.16b,vKey4.16b
+ aesimc vKey6.16b,vKey6.16b
+ vswap vKey3,vKey7
+
+ aesimc vKey5.16b,vKey5.16b
+ vswap vKey4,vKey6
+.endm
+
+/*
+ * void XTS_AES_128_dec_ce(
+ * uint8_t *k2, //!< key used for tweaking, 16 bytes
+ * uint8_t *k1, //!< key used for decryption of tweaked ciphertext, 16 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *ct, //!< ciphertext sector input data
+ * uint8_t *pt //!< plaintext sector output data
+ * );
+*/
+ .global XTS_AES_128_dec_ce
+ .type XTS_AES_128_dec_ce, %function
+XTS_AES_128_dec_ce:
+ xts_aes_crypt 1,keyexp_and_encrypt_tweak vIV0,key2,key1
+ .size XTS_AES_128_dec_ce, .-XTS_AES_128_dec_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_enc.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_enc.S
new file mode 100644
index 000000000..23ed14a38
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_128_enc.S
@@ -0,0 +1,91 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_128_common.S"
+#include "xts_aes_common.S"
+
+/* encrypt the tweak with the tweak key (k2) and, at the same time,
+ * expand the encryption key (k1);
+ * even though the two key sets share the same set of registers,
+ * they never overlap at any given time (k2 is used once and discarded)
+ */
+.macro keyexp_and_encrypt_tweak iv:req,k2:req,k1:req
+ aes_key_expand 0,\k2
+ aes_enc_round \iv,0
+ aes_key_expand 1
+ aes_enc_round \iv,1
+ aes_key_expand 0,\k1,rcon2
+ aes_key_expand 2
+ aes_enc_round \iv,2
+ aes_key_expand 1,rcon2
+ aes_key_expand 3
+ aes_enc_round \iv,3
+ aes_key_expand 2,rcon2
+ aes_key_expand 4
+ aes_enc_round \iv,4
+ aes_key_expand 3,rcon2
+ aes_key_expand 5
+ aes_enc_round \iv,5
+ aes_key_expand 4,rcon2
+ aes_key_expand 6
+ aes_enc_round \iv,6
+ aes_key_expand 5,rcon2
+ aes_key_expand 7
+ aes_enc_round \iv,7
+ aes_key_expand 6,rcon2
+ aes_key_expand 8
+ aes_enc_round \iv,8
+ aes_key_expand 7,rcon2
+ aes_key_expand 9
+ aes_enc_round \iv,9
+ aes_key_expand 8,rcon2
+ aes_key_expand 10
+ aes_enc_round \iv,10
+ aes_key_expand 9,rcon2
+ aes_key_expand 10,rcon2
+.endm
+
+
+/*
+ * void XTS_AES_128_enc_ce(
+ * uint8_t *k2, //!< key used for tweaking, 16 bytes
+ * uint8_t *k1, //!< key used for encryption of tweaked plaintext, 16 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *pt, //!< plaintext sector input data
+ * uint8_t *ct //!< ciphertext sector output data
+ * );
+ */
+ .global XTS_AES_128_enc_ce
+ .type XTS_AES_128_enc_ce, %function
+XTS_AES_128_enc_ce:
+ xts_aes_crypt 0,keyexp_and_encrypt_tweak vIV0,key2,key1
+ .size XTS_AES_128_enc_ce, .-XTS_AES_128_enc_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_common.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_common.S
new file mode 100644
index 000000000..e6535dba3
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_common.S
@@ -0,0 +1,247 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+.altmacro
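+/* derive AES-256 round keys \out0/\out1 from the previous pair:
+ * \out0 takes the standard rcon step (SubWord/RotWord via aese against an
+ * all-zero block plus the rcon held in \ctx), while \out1 applies SubWord
+ * only, as the 256-bit schedule requires on the intermediate step.
+ */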
+.macro aes_key_expand_next out0:req,out1:req,in0:req,in1:req,ctx:req
+ dup vdest.4s,vKey\in1\().s[3]
+ ext vtmp.16b,vzero.16b,vKey\in0\().16b,#12
+ aese vdest.16b,vzero.16b
+ eor vKey\out0\().16b,vKey\in0\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ eor vKey\out0\().16b,vKey\out0\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ mov tmpw,vdest.s[0]
+ eor tmpw,\ctx,tmpw,ror 8
+ dup vdest.4s,tmpw
+ eor vKey\out0\().16b,vKey\out0\().16b,vtmp.16b
+ mov \ctx,ctx,lsl 1
+ eor vKey\out0\().16b,vKey\out0\().16b,vdest.16b
+
+ .if \out1 < 14
+ dup vdest.4s, vKey\out0\().s[3]
+ ext vtmp.16b, vzero.16b,vKey\in1\().16b,#12
+ aese vdest.16b,vzero.16b
+ eor vKey\out1\().16b,vKey\in1\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ eor vKey\out1\().16b,vKey\out1\().16b,vtmp.16b
+ ext vtmp.16b,vzero.16b,vtmp.16b,#12
+ eor vKey\out1\().16b,vKey\out1\().16b,vtmp.16b
+ eor vKey\out1\().16b,vKey\out1\().16b,vdest.16b
+ .endif
+.endm
+
+/* when loading key = 0
+ * arg1 = input key
+ * arg2 = rcon ctx register (optional)
+ * when loading key > 0
+ * arg1 = rcon ctx register (optional)
+ */
+.macro aes_key_expand key:req,arg1,arg2
+ .if \key == 0
+ ld1 {vKey0.4s,vKey1.4s},[\arg1]
+ movi vzero.4s, 0
+ .ifb \arg2
+ mov rcon,#0x01
+ .endif
+
+ .ifnb \arg2
+ mov \arg2,#0x01
+ .endif
+ .endif
+
+ .if \key > 0
+ in0=\key-2
+ in1=\key-1
+ out0=\key
+ out1=\key+1
+ .ifb \arg1
+ aes_key_expand_next %out0,%out1,%in0,%in1,rcon
+ .endif
+
+ .ifnb \arg1
+ aes_key_expand_next %out0,%out1,%in0,%in1,\arg1
+ .endif
+ .endif
+.endm
+
+.macro aes_round block:req,key:req,mode:req
+ .if \key < 13
+ .if mode == 0
+ aese \block\().16b,vKey\key\().16b
+ aesmc \block\().16b,\block\().16b
+ .else
+ aesd \block\().16b,vKey\key\().16b
+ aesimc \block\().16b,\block\().16b
+ .endif
+ .endif
+ .if \key == 13
+ .if mode == 0
+ aese \block\().16b,vKey\key\().16b
+ .else
+ aesd \block\().16b,vKey\key\().16b
+ .endif
+ .endif
+ .if \key == 14
+ eor \block\().16b,\block\().16b,vKey\key\().16b
+ .endif
+.endm
+
+.macro aes_round_interleave b0:req,b1:req,b2:req,b3:req,key:req,mode:req,last_key
+ .if \key < 13
+ .if \mode == 0
+ aese \b0\().16b,vKey\key\().16b
+ aesmc \b0\().16b,\b0\().16b
+ aese \b1\().16b,vKey\key\().16b
+ aesmc \b1\().16b,\b1\().16b
+ aese \b2\().16b,vKey\key\().16b
+ aesmc \b2\().16b,\b2\().16b
+ aese \b3\().16b,vKey\key\().16b
+ aesmc \b3\().16b,\b3\().16b
+ .else
+ aesd \b0\().16b,vKey\key\().16b
+ aesimc \b0\().16b,\b0\().16b
+ aesd \b1\().16b,vKey\key\().16b
+ aesimc \b1\().16b,\b1\().16b
+ aesd \b2\().16b,vKey\key\().16b
+ aesimc \b2\().16b,\b2\().16b
+ aesd \b3\().16b,vKey\key\().16b
+ aesimc \b3\().16b,\b3\().16b
+ .endif
+ .endif
+
+ .if \key == 13
+ .if \mode == 0
+ aese \b0\().16b,vKey\key\().16b
+ eor \b0\().16b,\b0\().16b,vKey\last_key\().16b
+ aese \b1\().16b,vKey\key\().16b
+ eor \b1\().16b,\b1\().16b,vKey\last_key\().16b
+ aese \b2\().16b,vKey\key\().16b
+ eor \b2\().16b,\b2\().16b,vKey\last_key\().16b
+ aese \b3\().16b,vKey\key\().16b
+ eor \b3\().16b,\b3\().16b,vKey\last_key\().16b
+ .else
+ aesd \b0\().16b,vKey\key\().16b
+ eor \b0\().16b,\b0\().16b,vKey\last_key\().16b
+ aesd \b1\().16b,vKey\key\().16b
+ eor \b1\().16b,\b1\().16b,vKey\last_key\().16b
+ aesd \b2\().16b,vKey\key\().16b
+ eor \b2\().16b,\b2\().16b,vKey\last_key\().16b
+ aesd \b3\().16b,vKey\key\().16b
+ eor \b3\().16b,\b3\().16b,vKey\last_key\().16b
+ .endif
+ .endif
+.endm
+
+
+
+.macro aes_rounds_interleave b0:req,b1:req,b2:req,b3:req,mode
+ aes_round_interleave \b0,\b1,\b2,\b3,0,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,1,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,2,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,3,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,4,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,5,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,6,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,7,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,8,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,9,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,10,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,11,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,12,\mode
+ aes_round_interleave \b0,\b1,\b2,\b3,13,\mode,14
+.endm
+
+
+.macro aes_rounds blk:req,mode:req
+ aes_round \blk,0,\mode
+ aes_round \blk,1,\mode
+ aes_round \blk,2,\mode
+ aes_round \blk,3,\mode
+ aes_round \blk,4,\mode
+ aes_round \blk,5,\mode
+ aes_round \blk,6,\mode
+ aes_round \blk,7,\mode
+ aes_round \blk,8,\mode
+ aes_round \blk,9,\mode
+ aes_round \blk,10,\mode
+ aes_round \blk,11,\mode
+ aes_round \blk,12,\mode
+ aes_round \blk,13,\mode
+ aes_round \blk,14,\mode
+.endm
+
+/* load k1/k2 from memory and encrypt the tweak with k2
+ * both keys share the same set of registers
+ * but never overlap (k2 is used only once and discarded)
+ */
+.macro keyload_and_encrypt_tweak iv:req,k2:req,k1:req
+ ldp qKey0,qKey1,[\k2],#32
+ aes_enc_round \iv,0
+ ldp qKey2,qKey3,[\k2],#32
+ aes_enc_round \iv,1
+ ldp qKey0,qKey1,[\k1],#32
+ aes_enc_round \iv,2
+ ldp qKey4,qKey5,[\k2],#32
+ aes_enc_round \iv,3
+ ldp qKey2,qKey3,[\k1],#32
+ aes_enc_round \iv,4
+ ldp qKey6,qKey7,[\k2],#32
+ aes_enc_round \iv,5
+ ldp qKey4,qKey5,[\k1],#32
+ aes_enc_round \iv,6
+ ldp qKey8,qKey9,[k2],#32
+ aes_enc_round \iv,7
+ ldp qKey6,qKey7,[\k1],#32
+ aes_enc_round \iv,8
+ ldp qKey10,qKey11,[k2],#32
+ aes_enc_round \iv,9
+ ldp qKey8,qKey9,[\k1],#32
+ aes_enc_round \iv,10
+ ldp qKey12,qKey13,[k2],#32
+ aes_enc_round \iv,11
+ ldp qKey10,qKey11,[\k1],#32
+ aes_enc_round \iv,12
+ ld1 {vKey14.16b},[k2],#16
+ aes_enc_round \iv,13
+ ldp qKey12,qKey13,[\k1],#32
+ aes_enc_round \iv,14
+ ld1 {vKey14.16b},[\k1],#16
+.endm
+
+.macro save_stack
+ stp d8,d9,[sp, -48]!
+ stp d10,d11,[sp, 16]
+ add tmpbuf,sp,32
+.endm
+
+.macro restore_stack
+ ldp d10,d11,[sp, 16]
+ ldp d8,d9,[sp], 48
+.endm
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_dec.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_dec.S
new file mode 100644
index 000000000..aa46ded08
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_dec.S
@@ -0,0 +1,116 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_256_common.S"
+#include "xts_aes_common.S"
+
+.macro vswap vec1:req,vec2:req
+ mov vtmp.16b,\vec1\().16b
+ mov \vec1\().16b,\vec2\().16b
+ mov \vec2\().16b,vtmp.16b
+.endm
+
+/* encrypt the tweak with the tweak key (k2) and, at the same time,
+ * expand the encryption key (k1);
+ * even though the two key sets share the same set of registers,
+ * they never overlap at any given time (k2 is used only once and discarded)
+ */
+.macro keyexp_and_encrypt_tweak iv:req,k2:req,k1:req
+ aes_key_expand 0,\k2
+ aes_enc_round \iv,0
+ aes_enc_round \iv,1
+ aes_key_expand 2
+ aes_key_expand 0,\k1,rcon2
+ aes_enc_round \iv,2
+ aes_enc_round \iv,3
+ aes_key_expand 4
+ aes_key_expand 2,rcon2
+ aes_enc_round \iv,4
+ aes_enc_round \iv,5
+ aes_key_expand 6
+ aes_key_expand 4,rcon2
+ aes_enc_round \iv,6
+ aes_enc_round \iv,7
+ aes_key_expand 8
+ aes_key_expand 6,rcon2
+ aes_enc_round \iv,8
+ aes_enc_round \iv,9
+ aes_key_expand 10
+ aes_key_expand 8,rcon2
+ aes_enc_round \iv,10
+ aes_enc_round \iv,11
+ aes_key_expand 12
+ aes_key_expand 10,rcon2
+ aes_enc_round \iv,12
+ aes_enc_round \iv,13
+ aes_key_expand 14
+ aes_key_expand 12,rcon2
+ aes_enc_round \iv,14
+ aes_key_expand 14,rcon2
+
+ // transform the encryption key into the decryption key
+ aesimc vKey1.16b,vKey1.16b
+ vswap vKey0,vKey14
+ aesimc vKey13.16b,vKey13.16b
+ aesimc vKey2.16b,vKey2.16b
+ vswap vKey1,vKey13
+ aesimc vKey12.16b,vKey12.16b
+ aesimc vKey3.16b,vKey3.16b
+ vswap vKey2,vKey12
+ aesimc vKey11.16b,vKey11.16b
+ aesimc vKey4.16b,vKey4.16b
+ vswap vKey3,vKey11
+ aesimc vKey10.16b,vKey10.16b
+ aesimc vKey5.16b,vKey5.16b
+ vswap vKey4,vKey10
+ aesimc vKey9.16b,vKey9.16b
+ aesimc vKey6.16b,vKey6.16b
+ vswap vKey5,vKey9
+ aesimc vKey8.16b,vKey8.16b
+ aesimc vKey7.16b,vKey7.16b
+ vswap vKey6,vKey8
+.endm
+
+/*
+ * void XTS_AES_256_dec_ce(
+ * uint8_t *k2, //!< key used for tweaking, 32 bytes
+ * uint8_t *k1, //!< key used for decryption of tweaked ciphertext, 32 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *ct, //!< ciphertext sector input data
+ * uint8_t *pt //!< plaintext sector output data
+ * );
+*/
+ .global XTS_AES_256_dec_ce
+ .type XTS_AES_256_dec_ce, %function
+XTS_AES_256_dec_ce:
+ xts_aes_crypt 1,keyexp_and_encrypt_tweak vIV0,key2,key1
+ .size XTS_AES_256_dec_ce, .-XTS_AES_256_dec_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_enc.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_enc.S
new file mode 100644
index 000000000..8e4088a4d
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_256_enc.S
@@ -0,0 +1,88 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_256_common.S"
+#include "xts_aes_common.S"
+
+/* encrypt the tweak with the tweak key (k2) and, at the same time,
+ * expand the encryption key (k1);
+ * even though the two key sets share the same set of registers,
+ * they never overlap at any given time (k2 is used once and discarded)
+ */
+.macro keyexp_and_encrypt_tweak iv:req,k2:req,k1:req
+ aes_key_expand 0,\k2
+ aes_enc_round \iv,0
+ aes_enc_round \iv,1
+ aes_key_expand 2
+ aes_key_expand 0,\k1,rcon2
+ aes_enc_round \iv,2
+ aes_enc_round \iv,3
+ aes_key_expand 4
+ aes_key_expand 2,rcon2
+ aes_enc_round \iv,4
+ aes_enc_round \iv,5
+ aes_key_expand 6
+ aes_key_expand 4,rcon2
+ aes_enc_round \iv,6
+ aes_enc_round \iv,7
+ aes_key_expand 8
+ aes_key_expand 6,rcon2
+ aes_enc_round \iv,8
+ aes_enc_round \iv,9
+ aes_key_expand 10
+ aes_key_expand 8,rcon2
+ aes_enc_round \iv,10
+ aes_enc_round \iv,11
+ aes_key_expand 12
+ aes_key_expand 10,rcon2
+ aes_enc_round \iv,12
+ aes_enc_round \iv,13
+ aes_key_expand 14
+ aes_key_expand 12,rcon2
+ aes_enc_round \iv,14
+ aes_key_expand 14,rcon2
+.endm
+
+/*
+ * void XTS_AES_256_enc_ce(
+ * uint8_t *k2, //!< key used for tweaking, 32 bytes
+ * uint8_t *k1, //!< key used for encryption of tweaked plaintext, 32 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *pt, //!< plaintext sector input data
+ * uint8_t *ct //!< ciphertext sector output data
+ * );
+ */
+ .global XTS_AES_256_enc_ce
+ .type XTS_AES_256_enc_ce, %function
+XTS_AES_256_enc_ce:
+ xts_aes_crypt 0,keyexp_and_encrypt_tweak vIV0,key2,key1
+ .size XTS_AES_256_enc_ce, .-XTS_AES_256_enc_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_common.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_common.S
new file mode 100644
index 000000000..c32a13820
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_aes_common.S
@@ -0,0 +1,232 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
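+/* helpers that give symbolic q/v/s/d (and x/w) names to fixed registers so
+ * the code below can refer to keys, blocks and tweaks by role rather than
+ * by register number.
+ */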
+.macro declare_var_vector_reg name:req,reg:req
+.ifdef q\name
+ .unreq q\name
+ .unreq v\name
+ .unreq s\name
+ .unreq d\name
+.endif
+ .set q\name , \reg
+ q\name .req q\reg
+ v\name .req v\reg
+ s\name .req s\reg
+ d\name .req d\reg
+.endm
+
+.macro declare_var_generic_reg name:req,reg:req
+ \name .req x\reg
+ x\name .req x\reg
+ w\name .req w\reg
+.endm
+
+ declare_var_vector_reg zero ,0
+ declare_var_vector_reg tmp,1
+ declare_var_vector_reg mask,2
+ declare_var_vector_reg dest,3
+ declare_var_vector_reg blk0,4
+ declare_var_vector_reg blk1,5
+ declare_var_vector_reg blk2,6
+ declare_var_vector_reg blk3,7
+ declare_var_vector_reg Key11,8
+ declare_var_vector_reg Key12,9
+ declare_var_vector_reg Key13,10
+ declare_var_vector_reg Key14,11
+ declare_var_vector_reg SavedIv,16
+ declare_var_vector_reg IV0,17
+ declare_var_vector_reg IV1,18
+ declare_var_vector_reg IV2,19
+ declare_var_vector_reg IV3,20
+ declare_var_vector_reg Key0,21
+ declare_var_vector_reg Key1,22
+ declare_var_vector_reg Key2,23
+ declare_var_vector_reg Key3,24
+ declare_var_vector_reg Key4,25
+ declare_var_vector_reg Key5,26
+ declare_var_vector_reg Key6,27
+ declare_var_vector_reg Key7,28
+ declare_var_vector_reg Key8,29
+ declare_var_vector_reg Key9,30
+ declare_var_vector_reg Key10,31
+
+.macro aes_enc_round block:req,key:req
+ aes_round \block,\key,0
+.endm
+
+.macro aes_dec_round block:req,key:req
+ aes_round \block,\key,1
+.endm
+
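+/* advance the XTS tweak: multiply the 128-bit tweak by alpha, i.e. shift it
+ * left by one bit and, if the top bit fell off, xor the low byte with 0x87
+ * (the reduction for x^128 + x^7 + x^2 + x + 1).
+ */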
+.macro update_iv current:req,next:req
+ mov ivh,\current\().d[1]
+ mov ivl,\current\().d[0]
+ mov tmpw,#0x87
+ extr tmpx2,ivh,ivh,#32
+ extr ivh,ivh,ivl,#63
+ and tmpw,tmpw,tmpw2,asr#31
+ eor ivl,tmpx,ivl,lsl#1
+ mov \next\().d[1],ivh
+ mov \next\().d[0],ivl
+.endm
+
+.macro process_4_blks inp:req,outp:req,mode:req,is_tail
+ update_iv vIV0,vIV1
+ update_iv vIV1,vIV2
+ ldp qblk0,qblk1,[\inp],#32
+ ldp qblk2,qblk3,[\inp],#32
+ .ifnb \is_tail
+ update_iv vIV2, vSavedIv
+ update_iv vSavedIv,vIV3
+ .else
+ update_iv vIV2,vIV3
+ .endif
+ eor vblk0.16b,vblk0.16b,vIV0.16b
+ eor vblk1.16b,vblk1.16b,vIV1.16b
+ eor vblk2.16b,vblk2.16b,vIV2.16b
+ eor vblk3.16b,vblk3.16b,vIV3.16b
+
+ aes_rounds_interleave vblk0,vblk1,vblk2,vblk3,\mode
+ eor vblk0.16b,vblk0.16b,vIV0.16b
+ eor vblk1.16b,vblk1.16b,vIV1.16b
+ stp qblk0,qblk1,[\outp],#32
+ eor vblk2.16b,vblk2.16b,vIV2.16b
+ eor vblk3.16b,vblk3.16b,vIV3.16b
+ stp qblk2,qblk3,[\outp],#32
+ .ifb \is_tail
+ update_iv vIV3,vIV0
+ .endif
+.endm
+
+.macro process_1_blk inp:req,outp:req,mode:req
+ ld1 {vblk0.16b},[\inp],#16
+ eor vblk0.16b,vblk0.16b,vIV0.16b
+ aes_rounds vblk0,\mode
+ eor vblk0.16b,vblk0.16b,vIV0.16b
+ str qblk0,[\outp], #16
+.endm
+
+ key2 .req x0
+ key1 .req x1
+ iv .req x2
+ bytes .req x3
+ inp .req x4
+ outp .req x5
+ rcon .req w6
+ blocks .req x7
+ tmpx .req x8
+ tmpw .req w8
+ tmpw2 .req w9
+ tmpx2 .req x9
+ ivl .req x10
+ ivh .req x11
+ lastblk .req x12
+ tmpbuf .req x13
+ tailcnt .req x14
+ rcon2 .req w15
+
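+/* main XTS driver: \mode is 0 for encryption, 1 for decryption; the optional
+ * \expander macro (given \more as its arguments) is invoked first to load or
+ * expand the keys and encrypt the initial tweak.  Full 16-byte blocks are
+ * processed four at a time, then one at a time, and a trailing partial block
+ * is handled with ciphertext stealing through tmpbuf.
+ */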
+.macro xts_aes_crypt mode:req,expander,more:vararg
+ save_stack
+
+ ld1 {vIV0.16b},[iv],16
+ .ifnb \expander
+ \expander\() \more
+ .endif
+ lsr blocks,bytes,4
+ and tailcnt,bytes,#0x0F
+
+ cmp bytes,16
+ b.lt .return
+
+.process_4_blks:
+ cmp blocks, 4
+ b.lt .singles
+ subs blocks,blocks,4
+ /* in decryption mode, check whether these are the
+ * last full blocks before the less-than-one-block tail;
+ * the tweak needs to be swapped in this case
+ */
+ .if \mode == 1
+ b.gt .not_tail_4blk
+ cmp tailcnt,1
+ b.lt .not_tail_4blk
+ process_4_blks inp,outp,\mode,1
+ b .process_4_blks
+.not_tail_4blk:
+ .endif
+ process_4_blks inp,outp,\mode
+ b .process_4_blks
+
+.singles:
+ subs blocks,blocks,#1
+ b.lt .checktail
+ /* in decryption mode, check whether this is the
+ * last full block before the less-than-one-block tail;
+ * the tweak needs to be swapped in this case
+ */
+ .if \mode == 1
+ b.gt .not_tail_1blk
+ cmp tailcnt,1
+ b.lt .not_tail_1blk
+ mov vSavedIv.16b, vIV0.16b
+ update_iv vSavedIv, vIV0
+ process_1_blk inp,outp,\mode
+ b .checktail
+.not_tail_1blk:
+ .endif
+ process_1_blk inp,outp,\mode
+ update_iv vIV0,vIV0
+ b .singles
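+ /* ciphertext stealing: the leading bytes of the last full output block
+ * become the partial tail output, a full block is rebuilt in tmpbuf from
+ * the tail input plus the stolen bytes, and that block is run through one
+ * more block operation into the slot of the last full block
+ */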
+.checktail:
+ cmp tailcnt,1
+ b.lt .return
+ sub lastblk,outp,#16
+.copytail:
+ subs tailcnt,tailcnt,#1
+ ldrb tmpw,[lastblk,tailcnt]
+ strb tmpw,[outp,tailcnt]
+ ldrb tmpw,[inp,tailcnt]
+ strb tmpw,[tmpbuf,tailcnt]
+ b.gt .copytail
+ and tailcnt,bytes,#0x0F
+.steal:
+ cmp tailcnt,15
+ ldrb tmpw,[lastblk,tailcnt]
+ strb tmpw,[tmpbuf,tailcnt]
+ add tailcnt,tailcnt,#1
+ b.lt .steal
+ .if \mode == 1
+ mov vIV0.16b,vSavedIv.16b
+ .endif
+ process_1_blk tmpbuf,lastblk,\mode
+.return:
+ restore_stack
+ ret
+.endm
+
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_dec.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_dec.S
new file mode 100644
index 000000000..9549ebfa0
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_dec.S
@@ -0,0 +1,49 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_128_common.S"
+#include "xts_aes_common.S"
+
+/*
+ * void XTS_AES_128_dec_expanded_key_ce(
+ * uint8_t *k2, //!< expanded key used for tweaking, 16*11 bytes - encryption key is used
+ * uint8_t *k1, //!< expanded decryption key used for decryption of tweaked ciphertext, 16*11 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *ct, //!< ciphertext sector input data
+ * uint8_t *pt //!< plaintext sector output data
+ * );
+*/
+ .global XTS_AES_128_dec_expanded_key_ce
+ .type XTS_AES_128_dec_expanded_key_ce, %function
+XTS_AES_128_dec_expanded_key_ce:
+ xts_aes_crypt 1,keyload_and_encrypt_tweak,vIV0,key2,key1
+ .size XTS_AES_128_dec_expanded_key_ce, .-XTS_AES_128_dec_expanded_key_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_enc.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_enc.S
new file mode 100644
index 000000000..1f2d2db2e
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_128_enc.S
@@ -0,0 +1,49 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_128_common.S"
+#include "xts_aes_common.S"
+
+/*
+ * void XTS_AES_128_enc_expanded_key_ce(
+ * uint8_t *k2, //!< expanded key used for tweaking, 16*11 bytes
+ * uint8_t *k1, //!< expanded key used for encryption of tweaked plaintext, 16*11 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *pt, //!< plaintext sector input data
+ * uint8_t *ct //!< ciphertext sector output data
+ * );
+ */
+ .global XTS_AES_128_enc_expanded_key_ce
+ .type XTS_AES_128_enc_expanded_key_ce, %function
+XTS_AES_128_enc_expanded_key_ce:
+ xts_aes_crypt 0,keyload_and_encrypt_tweak,vIV0,key2,key1
+ .size XTS_AES_128_enc_expanded_key_ce, .-XTS_AES_128_enc_expanded_key_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_dec.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_dec.S
new file mode 100644
index 000000000..95c8bf63d
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_dec.S
@@ -0,0 +1,49 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_256_common.S"
+#include "xts_aes_common.S"
+
+/*
+ * void XTS_AES_256_dec_expanded_key_ce(
+ * uint8_t *k2, //!< expanded key used for tweaking, 16*15 bytes - encryption key is used
+ * uint8_t *k1, //!< expanded decryption key used for decryption of tweaked ciphertext, 16*15 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *ct, //!< ciphertext sector input data
+ * uint8_t *pt //!< plaintext sector output data
+ * );
+*/
+ .global XTS_AES_256_dec_expanded_key_ce
+ .type XTS_AES_256_dec_expanded_key_ce, %function
+XTS_AES_256_dec_expanded_key_ce:
+ xts_aes_crypt 1,keyload_and_encrypt_tweak,vIV0,key2,key1
+ .size XTS_AES_256_dec_expanded_key_ce, .-XTS_AES_256_dec_expanded_key_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_enc.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_enc.S
new file mode 100644
index 000000000..bd840a994
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_keyexp_aes_256_enc.S
@@ -0,0 +1,49 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8-a+crypto
+ .text
+
+#include "xts_aes_256_common.S"
+#include "xts_aes_common.S"
+
+/*
+ * void XTS_AES_256_enc_expanded_key_ce(
+ * uint8_t *k2, //!< expanded key used for tweaking, 16*15 bytes
+ * uint8_t *k1, //!< expanded key used for encryption of tweaked plaintext, 16*15 bytes
+ * uint8_t *TW_initial, //!< initial tweak value, 16 bytes
+ * uint64_t N, //!< sector size, in bytes
+ * const uint8_t *pt, //!< plaintext sector input data
+ * uint8_t *ct //!< ciphertext sector output data
+ * );
+ */
+ .global XTS_AES_256_enc_expanded_key_ce
+ .type XTS_AES_256_enc_expanded_key_ce, %function
+XTS_AES_256_enc_expanded_key_ce:
+ xts_aes_crypt 0,keyload_and_encrypt_tweak,vIV0,key2,key1
+ .size XTS_AES_256_enc_expanded_key_ce, .-XTS_AES_256_enc_expanded_key_ce
diff --git a/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_multibinary_aarch64.S b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_multibinary_aarch64.S
new file mode 100644
index 000000000..af77d885b
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/aes/aarch64/xts_multibinary_aarch64.S
@@ -0,0 +1,39 @@
+/**********************************************************************
+ Copyright(c) 2021 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+
+#include "aarch64_multibinary.h"
+
+mbin_interface XTS_AES_128_enc
+mbin_interface XTS_AES_128_dec
+mbin_interface XTS_AES_128_enc_expanded_key
+mbin_interface XTS_AES_128_dec_expanded_key
+mbin_interface XTS_AES_256_enc
+mbin_interface XTS_AES_256_dec
+mbin_interface XTS_AES_256_enc_expanded_key
+mbin_interface XTS_AES_256_dec_expanded_key