Diffstat (limited to 'src/crypto/isa-l/isa-l_crypto/sm3_mb/aarch64/sm3_mb_sm_x2.S')
 -rw-r--r--  src/crypto/isa-l/isa-l_crypto/sm3_mb/aarch64/sm3_mb_sm_x2.S | 344
 1 file changed, 344 insertions(+), 0 deletions(-)
diff --git a/src/crypto/isa-l/isa-l_crypto/sm3_mb/aarch64/sm3_mb_sm_x2.S b/src/crypto/isa-l/isa-l_crypto/sm3_mb/aarch64/sm3_mb_sm_x2.S
new file mode 100644
index 000000000..4e4a6e738
--- /dev/null
+++ b/src/crypto/isa-l/isa-l_crypto/sm3_mb/aarch64/sm3_mb_sm_x2.S
@@ -0,0 +1,344 @@
+/**********************************************************************
+ Copyright(c) 2020 Arm Corporation All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Arm Corporation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**********************************************************************/
+ .arch armv8.2-a+sm4
+ .text
+ .align 2
+ .p2align 3,,7
+
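+/*
+ * sm3_mb_sm_x2(): hash two independent SM3 jobs at once with the ARMv8.2-A
+ * SM3 instructions, interleaving the two dependency chains to hide latency.
+ * From the register usage below, the implied prototype is roughly
+ * (names hypothetical, inferred from this code rather than from a header):
+ *
+ *	void sm3_mb_sm_x2(SM3_JOB *job0, SM3_JOB *job1, int len);
+ *
+ * where each job keeps its data pointer at offset 0 and its 32-byte digest
+ * at offset 64, and len counts 64-byte SM3 blocks.
+ */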
+.macro declare_var_vector_reg name:req,reg:req
+ q\name\() .req q\reg
+ v\name\() .req v\reg
+ s\name\() .req s\reg
+.endm
+
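+/* Per-job wrappers: splice a job prefix ("job0"/"job1") and a role name
+   ("msg0", "tmp1", ...) into the vector-register aliases declared under
+   "Variables" below, so every step can be issued for either job by name. */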
+.macro do_ext job,arg0,arg1,arg2,arg3
+ ext v\job\()_\arg0\().16b,v\job\()_\arg1\().16b,v\job\()_\arg2\().16b,\arg3
+.endm
+.macro do_sm3partw1 job,msg4,msg0,msg3
+ sm3partw1 v\job\()_\msg4\().4s, v\job\()_\msg0\().4s, v\job\()_\msg3\().4s
+.endm
+.macro do_sm3partw2 job,msg4,tmp1,tmp0
+ sm3partw2 v\job\()_\msg4\().4s, v\job\()_\tmp1\().4s, v\job\()_\tmp0\().4s
+.endm
+
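+/*
+ * SM3 message schedule: with W[j..j+15] held in msg0..msg3, derive the next
+ * four words W[j+16..j+19] into msg4 via sm3partw1/sm3partw2.  Each step is
+ * issued for job0 and then job1 (.irp j,0,1) so the two jobs interleave.
+ */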
+.macro message_expand msg0:req,msg1:req,msg2:req,msg3:req,msg4:req,tmp0:req,tmp1:req
+ .irp j,0,1
+ do_ext job\j,\msg4,\msg1,\msg2,#12
+ .endr
+ .irp j,0,1
+ do_ext job\j,\tmp0,\msg0,\msg1,#12
+ .endr
+ .irp j,0,1
+ do_ext job\j,\tmp1,\msg2,\msg3,#8
+ .endr
+
+ .irp j,0,1
+ do_sm3partw1 job\j,\msg4, \msg0, \msg3
+ .endr
+ .irp j,0,1
+ do_sm3partw2 job\j,\msg4, \tmp1, \tmp0
+ .endr
+
+.endm
+
+.macro do_eor job,arg0,arg1,arg2
+ eor v\job\()_\arg0\().16b,v\job\()_\arg1\().16b,v\job\()_\arg2\().16b
+.endm
+.macro do_sm3ss1 job,tmp1,dig0,dig1,const
+ sm3ss1 v\job\()_\tmp1\().4s, v\job\()_\dig0\().4s, v\job\()_\dig1\().4s, v\const\().4s
+.endm
+
+.macro do_sm3tt1 job,ab,dig0,tmp1,tmp0,lane
+	sm3tt1\ab	v\job\()_\dig0\().4s, v\job\()_\tmp1\().4s, v\job\()_\tmp0\().4s[\lane]
+.endm
+.macro do_sm3tt2 job,ab,dig1,tmp1,msg0,lane
+ sm3tt2\ab v\job\()_\dig1\().4s, v\job\()_\tmp1\().4s, v\job\()_\msg0\().4s[\lane]
+.endm
+
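+/*
+ * One quad_round covers four SM3 rounds.  tmp0 = Wj ^ W(j+4) (the W' word
+ * consumed by sm3tt1); per lane, sm3ss1 forms SS1 from the digest halves and
+ * the round constant, then sm3tt1a/b updates A..D (dig0) and sm3tt2a/b
+ * updates E..H (dig1).  The "ext ...,12" rotates the constant vector one
+ * word so the top lane, the one sm3ss1 reads, next holds T(j+1); the "a"
+ * forms are the round-0..15 functions, "b" the round-16..63 ones.
+ */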
+.macro quad_round ab:req,const:req,dig0:req,dig1:req,msg0:req,msg1:req,tmp0:req,tmp1:req
+ .irp j,0,1
+ do_eor job\j,\tmp0,\msg0,\msg1
+ .endr
+ .irp lane,0,1,2,3
+ .irp j,0,1
+ do_sm3ss1 job\j,\tmp1,\dig0,\dig1,\const
+ .endr
+
+ ext v\const\().16b,v\const\().16b,v\const\().16b,12
+ .irp j,0,1
+ do_sm3tt1 job\j,\ab,\dig0,\tmp1,\tmp0,\lane
+ .endr
+ .irp j,0,1
+ do_sm3tt2 job\j,\ab,\dig1,\tmp1,\msg0,\lane
+ .endr
+ .endr
+.endm
+
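+/* fused step: extend the message schedule by four words, then run the four
+   rounds that consume them */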
+.macro quad_round_expand ab:req,const:req,dig0:req,dig1:req,msg0:req,msg1:req,msg2:req,msg3:req,msg4:req,tmp0:req,tmp1:req
+ message_expand \msg0,\msg1,\msg2,\msg3,\msg4,\tmp0,\tmp1
+ quad_round \ab,\const,\dig0,\dig1,\msg0,\msg1,\tmp0,\tmp1
+.endm
+
+/*
+ Variables
+*/
+ job0 .req x0
+ job1 .req x1
+ len .req x2
+
+ job0_data .req x3
+ job1_data .req x4
+ job0_digest .req x0
+ job1_digest .req x1
+
+ const_adr .req x5
+ end_ptr .req x2
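+	/* job0/job0_digest alias x0 and job1/job1_digest alias x1: loading the
+	   data pointers below post-increments x0/x1 by 64, which leaves them
+	   pointing at the digests.  end_ptr likewise reuses len (x2) once the
+	   block count has been folded into it. */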
+
+ declare_var_vector_reg job0_msg0, 0
+ declare_var_vector_reg job0_msg1, 1
+ declare_var_vector_reg job0_msg2, 2
+ declare_var_vector_reg job0_msg3, 3
+ declare_var_vector_reg job0_msg4, 4
+ declare_var_vector_reg job0_dig0, 5
+ declare_var_vector_reg job0_dig1, 6
+ declare_var_vector_reg job0_tmp0, 7
+ declare_var_vector_reg job0_tmp1, 8
+ declare_var_vector_reg job0_backup_dig0, 9
+ declare_var_vector_reg job0_backup_dig1, 10
+
+ declare_var_vector_reg job1_msg0, 11
+ declare_var_vector_reg job1_msg1, 12
+ declare_var_vector_reg job1_msg2, 13
+ declare_var_vector_reg job1_msg3, 14
+ declare_var_vector_reg job1_msg4, 15
+ declare_var_vector_reg job1_dig0, 16
+ declare_var_vector_reg job1_dig1, 17
+ declare_var_vector_reg job1_tmp0, 18
+ declare_var_vector_reg job1_tmp1, 19
+ declare_var_vector_reg job1_backup_dig0, 20
+ declare_var_vector_reg job1_backup_dig1, 21
+
+ declare_var_vector_reg const0, 22
+ declare_var_vector_reg const1, 23
+ declare_var_vector_reg const2, 24
+ declare_var_vector_reg const3, 25
+ declare_var_vector_reg const4, 26
+ declare_var_vector_reg const5, 27
+ declare_var_vector_reg const6, 28
+ declare_var_vector_reg const7, 29
+ declare_var_vector_reg const8, 30
+ declare_var_vector_reg const9, 31
+ declare_var_vector_reg const10, 22
+ declare_var_vector_reg const11, 23
+
+.macro do_rev32_msg job:req,msg:req
+ rev32 v\job\()_\msg\().16b,v\job\()_\msg\().16b
+.endm
+.macro do_rev32_job job:req
+ .irp m,0,1,2,3
+ do_rev32_msg \job,msg\m
+ .endr
+.endm
+.macro rev32_msgs
+ .irp j,0,1
+ do_rev32_job job\j
+ .endr
+.endm
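+/* SM3 message words are big-endian; rev32 flips each 32-bit word of all four
+   message vectors, for both jobs, after they are loaded. */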
+
+
+ .global sm3_mb_sm_x2
+ .type sm3_mb_sm_x2, %function
+sm3_mb_sm_x2:
+	// save the callee-saved SIMD registers d8-d15 (AAPCS64) and open the frame
+ stp d8,d9,[sp,-192]!
+ stp d10,d11,[sp,16]
+ stp d12,d13,[sp,32]
+ stp d14,d15,[sp,48]
+
+
+ adrp const_adr,.consts
+ ldr job0_data, [job0],64
+ add const_adr,const_adr,:lo12:.consts
+ ldr job1_data, [job1],64
+ ldp qjob0_dig0,qjob0_dig1,[job0_digest]
+ ldp qjob1_dig0,qjob1_dig1,[job1_digest]
+
+ ldp qconst2,qconst3,[const_adr,32]
+ ldp qconst4,qconst5,[const_adr,64]
+ ldp qconst6,qconst7,[const_adr,96]
+ ldp qconst8,qconst9,[const_adr,128]
+ add end_ptr,job0_data,len,lsl 6
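+	// end_ptr = data + len*64: loop until job0's buffer is exhausted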
+
+	// rev128: ext #8 swaps the 64-bit halves and rev64 byte-reverses each
+	// half, i.e. a full 16-byte byte reversal of every digest vector
+ ext vjob0_dig0.16b,vjob0_dig0.16b,vjob0_dig0.16b,#8
+ ext vjob0_dig1.16b,vjob0_dig1.16b,vjob0_dig1.16b,#8
+ rev64 vjob0_dig0.16b,vjob0_dig0.16b
+ rev64 vjob0_dig1.16b,vjob0_dig1.16b
+ ext vjob1_dig0.16b,vjob1_dig0.16b,vjob1_dig0.16b,#8
+ ext vjob1_dig1.16b,vjob1_dig1.16b,vjob1_dig1.16b,#8
+ rev64 vjob1_dig0.16b,vjob1_dig0.16b
+ rev64 vjob1_dig1.16b,vjob1_dig1.16b
+
+start_loop:
+
+ ld1 {vjob0_msg0.16b-vjob0_msg3.16b},[job0_data],64
+ ld1 {vjob1_msg0.16b-vjob1_msg3.16b},[job1_data],64
+
+ mov vjob0_backup_dig0.16b,vjob0_dig0.16b
+ mov vjob0_backup_dig1.16b,vjob0_dig1.16b
+ mov vjob1_backup_dig0.16b,vjob1_dig0.16b
+ mov vjob1_backup_dig1.16b,vjob1_dig1.16b
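+	// snapshot the incoming state: SM3 xors each block's round output back
+	// into the state it started from (the eor block at the loop bottom)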
+
+ // const10,const11,const0,const1 share registers
+ ldp qconst0,qconst1,[const_adr]
+
+ // big-endian to little-endian
+ rev32_msgs
+
+ cmp job0_data,end_ptr
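+	// this compare's flags survive to the bcc at the loop bottom: none of
+	// the SIMD/crypto instructions in between touch NZCV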
+ quad_round_expand a, const0 , dig0, dig1, msg0, msg1, msg2, msg3, msg4, tmp0, tmp1
+
+
+ quad_round_expand a, const1 , dig0, dig1, msg1, msg2, msg3, msg4, msg0, tmp0, tmp1
+ // const10,const11,const0,const1 share registers
+ ldp qconst10,qconst11,[const_adr,160]
+ quad_round_expand a, const2 , dig0, dig1, msg2, msg3, msg4, msg0, msg1, tmp0, tmp1
+ quad_round_expand a, const3 , dig0, dig1, msg3, msg4, msg0, msg1, msg2, tmp0, tmp1
+ quad_round_expand b, const4 , dig0, dig1, msg4, msg0, msg1, msg2, msg3, tmp0, tmp1
+ quad_round_expand b, const5 , dig0, dig1, msg0, msg1, msg2, msg3, msg4, tmp0, tmp1
+ quad_round_expand b, const6 , dig0, dig1, msg1, msg2, msg3, msg4, msg0, tmp0, tmp1
+ quad_round_expand b, const7 , dig0, dig1, msg2, msg3, msg4, msg0, msg1, tmp0, tmp1
+ quad_round_expand b, const8 , dig0, dig1, msg3, msg4, msg0, msg1, msg2, tmp0, tmp1
+ quad_round_expand b, const9 , dig0, dig1, msg4, msg0, msg1, msg2, msg3, tmp0, tmp1
+ quad_round_expand b, const10, dig0, dig1, msg0, msg1, msg2, msg3, msg4, tmp0, tmp1
+ quad_round_expand b, const11, dig0, dig1, msg1, msg2, msg3, msg4, msg0, tmp0, tmp1
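+	// ROL32 is periodic in 32, so T(48..63) equal T(16..31): const4..const7
+	// simply serve the last sixteen rounds again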
+ quad_round_expand b, const4 , dig0, dig1, msg2, msg3, msg4, msg0, msg1, tmp0, tmp1
+
+
+ quad_round b, const5, dig0, dig1, msg3, msg4, tmp0, tmp1
+
+ quad_round b, const6, dig0, dig1, msg4, msg0, tmp0, tmp1
+ quad_round b, const7, dig0, dig1, msg0, msg1, tmp0, tmp1
+
+ eor vjob0_dig0.16b,vjob0_dig0.16b,vjob0_backup_dig0.16b
+ eor vjob0_dig1.16b,vjob0_dig1.16b,vjob0_backup_dig1.16b
+ eor vjob1_dig0.16b,vjob1_dig0.16b,vjob1_backup_dig0.16b
+ eor vjob1_dig1.16b,vjob1_dig1.16b,vjob1_backup_dig1.16b
+
+
+ bcc start_loop
+
+	// rev128 again: return the digests to memory byte order before storing
+ ext vjob0_dig0.16b,vjob0_dig0.16b,vjob0_dig0.16b,#8
+ ext vjob0_dig1.16b,vjob0_dig1.16b,vjob0_dig1.16b,#8
+ rev64 vjob0_dig0.16b,vjob0_dig0.16b
+ rev64 vjob0_dig1.16b,vjob0_dig1.16b
+ stp qjob0_dig0,qjob0_dig1,[job0_digest]
+
+ ext vjob1_dig0.16b,vjob1_dig0.16b,vjob1_dig0.16b,#8
+ ext vjob1_dig1.16b,vjob1_dig1.16b,vjob1_dig1.16b,#8
+ rev64 vjob1_dig0.16b,vjob1_dig0.16b
+ rev64 vjob1_dig1.16b,vjob1_dig1.16b
+ stp qjob1_dig0,qjob1_dig1,[job1_digest]
+
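+	// the conditionally compiled block below looks like debug scaffolding:
+	// it copies job1's final digest into v0/v1, and the branch targets the
+	// label that immediately follows, so behaviour is the same either way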
+#if 1
+ mov v0.16b,vjob1_dig0.16b
+ mov v1.16b,vjob1_dig1.16b
+ b exit_ret
+#endif
+
+exit_ret:
+ ldp d10,d11,[sp,16]
+ ldp d12,d13,[sp,32]
+ ldp d14,d15,[sp,48]
+ ldp d8, d9, [sp], 192
+ ret
+
+ .align 2
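+/*
+ * Round constants: T(j) = ROL32(0x79cc4519, j) for j = 0..15 and
+ * ROL32(0x7a879d8a, j) for j = 16..63.  Each 16-byte row stores four
+ * consecutive T(j) with the lanes reversed (the //N comments give j), the
+ * order the "ext ...,12" rotation in quad_round expects.  Only the first
+ * 192 bytes (j = 0..47) are ever loaded, since const4..const7 are reused
+ * for rounds 48..63; the trailing, incomplete row is effectively spare.
+ */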
+.consts:
+ .word 0xce6228cb // 3
+ .word 0xe7311465 // 2
+ .word 0xf3988a32 // 1
+ .word 0x79cc4519 // 0
+ .word 0xe6228cbc // 7
+ .word 0x7311465e // 6
+ .word 0x3988a32f // 5
+ .word 0x9cc45197 // 4
+ .word 0x6228cbce //11
+ .word 0x311465e7 //10
+ .word 0x988a32f3 // 9
+ .word 0xcc451979 // 8
+ .word 0x228cbce6 //15
+ .word 0x11465e73 //14
+ .word 0x88a32f39 //13
+ .word 0xc451979c //12
+ .word 0xec53d43c //19
+ .word 0x7629ea1e //18
+ .word 0x3b14f50f //17
+ .word 0x9d8a7a87 //16
+ .word 0xc53d43ce //23
+ .word 0x629ea1e7 //22
+ .word 0xb14f50f3 //21
+ .word 0xd8a7a879 //20
+ .word 0x53d43cec //27
+ .word 0x29ea1e76 //26
+ .word 0x14f50f3b //25
+ .word 0x8a7a879d //24
+ .word 0x3d43cec5 //31
+ .word 0x9ea1e762 //30
+ .word 0x4f50f3b1 //29
+ .word 0xa7a879d8 //28
+ .word 0xd43cec53 //35
+ .word 0xea1e7629 //34
+ .word 0xf50f3b14 //33
+ .word 0x7a879d8a //32
+ .word 0x43cec53d //39
+ .word 0xa1e7629e //38
+ .word 0x50f3b14f //37
+ .word 0xa879d8a7 //36
+ .word 0x3cec53d4 //43
+ .word 0x1e7629ea //42
+ .word 0x0f3b14f5 //41
+ .word 0x879d8a7a //40
+ .word 0xcec53d43 //47
+ .word 0xe7629ea1 //46
+ .word 0xf3b14f50 //45
+ .word 0x79d8a7a8 //44
+ .word 0xec53d43c //51
+ .word 0x7629ea1e //50
+ .word 0x3b14f50f //49
+
+
+ .size sm3_mb_sm_x2, .-sm3_mb_sm_x2
+