Diffstat (limited to 'third_party/dav1d/tests/checkasm/arm')
-rw-r--r--  third_party/dav1d/tests/checkasm/arm/checkasm_32.S  201
-rw-r--r--  third_party/dav1d/tests/checkasm/arm/checkasm_64.S  211
2 files changed, 412 insertions, 0 deletions
diff --git a/third_party/dav1d/tests/checkasm/arm/checkasm_32.S b/third_party/dav1d/tests/checkasm/arm/checkasm_32.S
new file mode 100644
index 0000000000..a186ef8fc2
--- /dev/null
+++ b/third_party/dav1d/tests/checkasm/arm/checkasm_32.S
@@ -0,0 +1,201 @@
+/******************************************************************************
+ * Copyright © 2018, VideoLAN and dav1d authors
+ * Copyright © 2015 Martin Storsjo
+ * Copyright © 2015 Janne Grunau
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *****************************************************************************/
+
+#define PRIVATE_PREFIX checkasm_
+
+#include "src/arm/asm.S"
+#include "src/arm/32/util.S"
+
+const register_init, align=3
+ .quad 0x21f86d66c8ca00ce
+ .quad 0x75b6ba21077c48ad
+ .quad 0xed56bb2dcb3c7736
+ .quad 0x8bda43d3fd1a7e06
+ .quad 0xb64a9c9e5d318408
+ .quad 0xdf9a54b303f1d3a3
+ .quad 0x4a75479abd64e097
+ .quad 0x249214109d5d1c88
+endconst
+
+const error_message_fpscr
+ .asciz "failed to preserve register FPSCR, changed bits: %x"
+error_message_gpr:
+ .asciz "failed to preserve register r%d"
+error_message_vfp:
+ .asciz "failed to preserve register d%d"
+error_message_stack:
+ .asciz "failed to preserve stack"
+endconst
+
+@ max number of args used by any asm function.
+#define MAX_ARGS 15
+
+#define ARG_STACK 4*(MAX_ARGS - 4)
+
+@ Align the used stack space to 8 to preserve the stack alignment.
+@ +8 for stack canary reference.
+#define ARG_STACK_A (((ARG_STACK + pushed + 7) & ~7) - pushed + 8)
+
+.macro clobbercheck variant
+.equ pushed, 4*9
+function checked_call_\variant, export=1
+ push {r4-r11, lr}
+.ifc \variant, vfp
+ vpush {d8-d15}
+ fmrx r4, FPSCR
+ push {r4}
+.equ pushed, pushed + 16*4 + 4
+.endif
+
+ movrel r12, register_init
+.ifc \variant, vfp
+ vldm r12, {d8-d15}
+.endif
+ ldm r12, {r4-r11}
+
+ sub sp, sp, #ARG_STACK_A
+.equ pos, 0
+.rept MAX_ARGS-4
+ ldr r12, [sp, #ARG_STACK_A + pushed + 8 + pos]
+ str r12, [sp, #pos]
+.equ pos, pos + 4
+.endr
+
+ @ When checking for stack overwrites, keep in mind that the callee is
+ @ free to overwrite the parameters that were passed on the stack (if
+ @ any), so we can only check beyond that point. First figure out how
+ @ many parameters the function really took on the stack:
+ ldr r12, [sp, #ARG_STACK_A + pushed + 8 + 4*(MAX_ARGS-4)]
+ @ Load the first non-parameter value from the stack, which should be
+ @ left untouched by the function. Store a copy of it inverted, so that
+ @ e.g. overwriting everything with zero would be noticed.
+ ldr r12, [sp, r12, lsl #2]
+ mvn r12, r12
+ str r12, [sp, #ARG_STACK_A - 4]
+
+ mov r12, r0
+ mov r0, r2
+ mov r1, r3
+ ldrd r2, r3, [sp, #ARG_STACK_A + pushed]
+ @ Call the target function
+ blx r12
+
+ @ Load the number of stack parameters, stack canary and its reference
+ ldr r12, [sp, #ARG_STACK_A + pushed + 8 + 4*(MAX_ARGS-4)]
+ ldr r2, [sp, r12, lsl #2]
+ ldr r3, [sp, #ARG_STACK_A - 4]
+
+ add sp, sp, #ARG_STACK_A
+ push {r0, r1}
+
+ mvn r3, r3
+ cmp r2, r3
+ bne 5f
+
+ movrel r12, register_init
+.ifc \variant, vfp
+.macro check_reg_vfp, dreg, offset
+ ldrd r2, r3, [r12, #8 * (\offset)]
+ vmov r0, lr, \dreg
+ eor r2, r2, r0
+ eor r3, r3, lr
+ orrs r2, r2, r3
+ bne 4f
+.endm
+
+.irp n, 8, 9, 10, 11, 12, 13, 14, 15
+ @ keep track of the checked double/SIMD register
+ mov r1, #\n
+ check_reg_vfp d\n, \n-8
+.endr
+.purgem check_reg_vfp
+
+ fmrx r1, FPSCR
+ ldr r3, [sp, #8]
+ eor r1, r1, r3
+ @ Ignore changes in bits 0-4 and 7
+ bic r1, r1, #0x9f
+ @ Ignore changes in the topmost 5 bits
+ bics r1, r1, #0xf8000000
+ bne 3f
+.endif
+
+ @ keep track of the checked GPR
+ mov r1, #4
+.macro check_reg reg1, reg2=
+ ldrd r2, r3, [r12], #8
+ eors r2, r2, \reg1
+ bne 2f
+ add r1, r1, #1
+.ifnb \reg2
+ eors r3, r3, \reg2
+ bne 2f
+.endif
+ add r1, r1, #1
+.endm
+ check_reg r4, r5
+ check_reg r6, r7
+@ r9 is a volatile register in the iOS ABI
+#ifdef __APPLE__
+ check_reg r8
+#else
+ check_reg r8, r9
+#endif
+ check_reg r10, r11
+.purgem check_reg
+
+ b 0f
+5:
+ movrel r0, error_message_stack
+ b 1f
+4:
+ movrel r0, error_message_vfp
+ b 1f
+3:
+ movrel r0, error_message_fpscr
+ b 1f
+2:
+ movrel r0, error_message_gpr
+1:
+#ifdef PREFIX
+ bl _checkasm_fail_func
+#else
+ bl checkasm_fail_func
+#endif
+0:
+ pop {r0, r1}
+.ifc \variant, vfp
+ pop {r2}
+ fmxr FPSCR, r2
+ vpop {d8-d15}
+.endif
+ pop {r4-r11, pc}
+endfunc
+.endm
+
+clobbercheck vfp
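The key mechanism in checked_call_vfp above is the inverted stack canary: before calling the function under test, the wrapper reads the first stack slot the callee is not allowed to touch, stores its bitwise inverse just below the argument area, and compares the two again after the call. Below is a minimal C sketch of the same idea; the real check is done entirely in the assembly above, and canary_survived / well_behaved are illustrative names, not part of checkasm.

    #include <stdint.h>
    #include <stdio.h>

    /* Inverted-canary check, as in checked_call_vfp: stash the bitwise
     * inverse of the first non-parameter stack slot before the call, then
     * verify the slot still matches afterwards. Storing the inverse rather
     * than a plain copy means a callee that uniformly overwrites the area
     * (e.g. with zeros) is still caught. */
    static int canary_survived(uint32_t *slot, void (*fn)(void))
    {
        uint32_t ref = ~*slot; /* mvn r12, r12; str r12, [sp, #ARG_STACK_A - 4] */
        fn();                  /* blx r12 */
        return *slot == ~ref;  /* mvn r3, r3; cmp r2, r3 */
    }

    static void well_behaved(void)
    {
        /* leaves the caller's stack alone */
    }

    int main(void)
    {
        uint32_t slot = 0xdeadbeef;
        if (!canary_survived(&slot, well_behaved))
            fprintf(stderr, "failed to preserve stack\n");
        return 0;
    }
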
diff --git a/third_party/dav1d/tests/checkasm/arm/checkasm_64.S b/third_party/dav1d/tests/checkasm/arm/checkasm_64.S
new file mode 100644
index 0000000000..25749145a5
--- /dev/null
+++ b/third_party/dav1d/tests/checkasm/arm/checkasm_64.S
@@ -0,0 +1,211 @@
+/******************************************************************************
+ * Copyright © 2018, VideoLAN and dav1d authors
+ * Copyright © 2015 Martin Storsjo
+ * Copyright © 2015 Janne Grunau
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *****************************************************************************/
+
+#define PRIVATE_PREFIX checkasm_
+
+#include "src/arm/asm.S"
+#include "src/arm/64/util.S"
+
+const register_init, align=4
+ .quad 0x21f86d66c8ca00ce
+ .quad 0x75b6ba21077c48ad
+ .quad 0xed56bb2dcb3c7736
+ .quad 0x8bda43d3fd1a7e06
+ .quad 0xb64a9c9e5d318408
+ .quad 0xdf9a54b303f1d3a3
+ .quad 0x4a75479abd64e097
+ .quad 0x249214109d5d1c88
+ .quad 0x1a1b2550a612b48c
+ .quad 0x79445c159ce79064
+ .quad 0x2eed899d5a28ddcd
+ .quad 0x86b2536fcd8cf636
+ .quad 0xb0856806085e7943
+ .quad 0x3f2bf84fc0fcca4e
+ .quad 0xacbd382dcf5b8de2
+ .quad 0xd229e1f5b281303f
+ .quad 0x71aeaff20b095fd9
+ .quad 0xab63e2e11fa38ed9
+endconst
+
+
+const error_message_register
+ .asciz "failed to preserve register"
+error_message_stack:
+ .asciz "stack clobbered"
+endconst
+
+
+// max number of args used by any asm function.
+#define MAX_ARGS 15
+
+#define CLOBBER_STACK ((8*MAX_ARGS + 15) & ~15)
+
+function stack_clobber, export=1
+ mov x3, sp
+ mov x2, #CLOBBER_STACK
+1:
+ stp x0, x1, [sp, #-16]!
+ subs x2, x2, #16
+ b.gt 1b
+ mov sp, x3
+ ret
+endfunc
+
+// + 16 for stack canary reference
+#define ARG_STACK (((8*(MAX_ARGS - 8) + 15) & ~15) + 16)
+
+function checked_call, export=1
+ stp x29, x30, [sp, #-16]!
+ mov x29, sp
+ stp x19, x20, [sp, #-16]!
+ stp x21, x22, [sp, #-16]!
+ stp x23, x24, [sp, #-16]!
+ stp x25, x26, [sp, #-16]!
+ stp x27, x28, [sp, #-16]!
+ stp d8, d9, [sp, #-16]!
+ stp d10, d11, [sp, #-16]!
+ stp d12, d13, [sp, #-16]!
+ stp d14, d15, [sp, #-16]!
+
+ movrel x9, register_init
+ ldp d8, d9, [x9], #16
+ ldp d10, d11, [x9], #16
+ ldp d12, d13, [x9], #16
+ ldp d14, d15, [x9], #16
+ ldp x19, x20, [x9], #16
+ ldp x21, x22, [x9], #16
+ ldp x23, x24, [x9], #16
+ ldp x25, x26, [x9], #16
+ ldp x27, x28, [x9], #16
+
+ sub sp, sp, #ARG_STACK
+.equ pos, 0
+.rept MAX_ARGS-8
+ // Skip the first 8 args, which are loaded into registers
+ ldr x9, [x29, #16 + 8*8 + pos]
+ str x9, [sp, #pos]
+.equ pos, pos + 8
+.endr
+
+ // Fill x8-x17 with garbage. These registers don't have to be preserved,
+ // but seeding them avoids relying on them having any particular value.
+ movrel x9, register_init
+ ldp x10, x11, [x9], #32
+ ldp x12, x13, [x9], #32
+ ldp x14, x15, [x9], #32
+ ldp x16, x17, [x9], #32
+ ldp x8, x9, [x9]
+
+ // When checking for stack overwrites, keep in mind that the callee is
+ // free to overwrite the parameters that were passed on the stack (if
+ // any), so we can only check beyond that point. First figure out how
+ // many parameters the function really took on the stack:
+ ldr w2, [x29, #16 + 8*8 + (MAX_ARGS-8)*8]
+ // Load the first non-parameter value from the stack, which should be
+ // left untouched by the function. Store a copy of it inverted, so that
+ // e.g. overwriting everything with zero would be noticed.
+ ldr x2, [sp, x2, lsl #3]
+ mvn x2, x2
+ str x2, [sp, #ARG_STACK-8]
+
+ // Load the in-register arguments
+ mov x12, x0
+ ldp x0, x1, [x29, #16]
+ ldp x2, x3, [x29, #32]
+ ldp x4, x5, [x29, #48]
+ ldp x6, x7, [x29, #64]
+ // Call the target function
+ blr x12
+
+ // Load the number of stack parameters, stack canary and its reference
+ ldr w2, [x29, #16 + 8*8 + (MAX_ARGS-8)*8]
+ ldr x2, [sp, x2, lsl #3]
+ ldr x3, [sp, #ARG_STACK-8]
+
+ add sp, sp, #ARG_STACK
+ stp x0, x1, [sp, #-16]!
+
+ mvn x3, x3
+ cmp x2, x3
+ b.ne 2f
+
+ movrel x9, register_init
+ movi v3.8h, #0
+
+.macro check_reg_neon reg1, reg2
+ ldr q1, [x9], #16
+ uzp1 v2.2d, v\reg1\().2d, v\reg2\().2d
+ eor v1.16b, v1.16b, v2.16b
+ orr v3.16b, v3.16b, v1.16b
+.endm
+ check_reg_neon 8, 9
+ check_reg_neon 10, 11
+ check_reg_neon 12, 13
+ check_reg_neon 14, 15
+ uqxtn v3.8b, v3.8h
+ umov x3, v3.d[0]
+
+.macro check_reg reg1, reg2
+ ldp x0, x1, [x9], #16
+ eor x0, x0, \reg1
+ eor x1, x1, \reg2
+ orr x3, x3, x0
+ orr x3, x3, x1
+.endm
+ check_reg x19, x20
+ check_reg x21, x22
+ check_reg x23, x24
+ check_reg x25, x26
+ check_reg x27, x28
+
+ cbz x3, 0f
+
+ movrel x0, error_message_register
+ b 1f
+2:
+ movrel x0, error_message_stack
+1:
+#ifdef PREFIX
+ bl _checkasm_fail_func
+#else
+ bl checkasm_fail_func
+#endif
+0:
+ ldp x0, x1, [sp], #16
+ ldp d14, d15, [sp], #16
+ ldp d12, d13, [sp], #16
+ ldp d10, d11, [sp], #16
+ ldp d8, d9, [sp], #16
+ ldp x27, x28, [sp], #16
+ ldp x25, x26, [sp], #16
+ ldp x23, x24, [sp], #16
+ ldp x21, x22, [sp], #16
+ ldp x19, x20, [sp], #16
+ ldp x29, x30, [sp], #16
+ ret
+endfunc
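
The register comparison in the AArch64 version is branchless: each restored pair is XORed against its register_init reference and the results are ORed into one accumulator (v3 for the NEON registers, x3 for the GPRs), so a single cbz decides pass/fail. A rough C equivalent of that accumulation, with illustrative names (regs_preserved is not part of checkasm):

    #include <stdint.h>

    /* XOR-and-accumulate comparison, as in check_reg / check_reg_neon:
     * any register that no longer matches its reference pattern leaves a
     * non-zero bit in diff, so one test at the end suffices. */
    static int regs_preserved(const uint64_t *saved, const uint64_t *ref, int n)
    {
        uint64_t diff = 0;
        for (int i = 0; i < n; i++)
            diff |= saved[i] ^ ref[i]; /* eor + orr in the assembly */
        return diff == 0;              /* cbz x3, 0f */
    }

    int main(void)
    {
        /* First two patterns from register_init, standing in for x19/x20. */
        const uint64_t ref[]   = { 0x21f86d66c8ca00ceULL, 0x75b6ba21077c48adULL };
        uint64_t       saved[] = { 0x21f86d66c8ca00ceULL, 0x75b6ba21077c48adULL };
        saved[1] ^= 1; /* simulate a clobbered register */
        return regs_preserved(saved, ref, 2) ? 0 : 1; /* exits 1 here */
    }

Seeding the callee-saved registers from register_init before the call is what gives this comparison its power: a callee that clobbers one of them is vanishingly unlikely to restore these exact 64-bit patterns by accident.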