summaryrefslogtreecommitdiffstats
path: root/vendor/rustix/src/imp/linux_raw/arch/outline
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/rustix/src/imp/linux_raw/arch/outline')
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/aarch64.s119
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/arm.s135
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/mips.s213
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/mips64.s189
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/mod.rs33
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/nr_last.rs166
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/powerpc64.s132
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/riscv64.s116
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/x86.rs285
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/x86.s381
-rw-r--r--vendor/rustix/src/imp/linux_raw/arch/outline/x86_64.s122
11 files changed, 1891 insertions, 0 deletions
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/aarch64.s b/vendor/rustix/src/imp/linux_raw/arch/outline/aarch64.s
new file mode 100644
index 000000000..1fad2fa6d
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/aarch64.s
@@ -0,0 +1,119 @@
+// Assembly code for making aarch64 syscalls.
+//
+// aarch64 syscall argument register ordering is the same as the aarch64
+// userspace argument register ordering except that the syscall number
+// (nr) is passed in w8.
+//
+// nr_last.rs takes care of reordering the nr argument to the end for us,
+// so we only need to move nr into w8.
+//
+// arm64-ilp32 is not yet supported.
+
+ .file "aarch64.s"
+ .arch armv8-a
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+rustix_syscall0_nr_last:
+ .cfi_startproc
+ mov w8, w0
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+rustix_syscall1_nr_last:
+ .cfi_startproc
+ mov w8, w1
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+rustix_syscall1_noreturn_nr_last:
+ .cfi_startproc
+ mov w8, w1
+ svc #0
+ brk #0x1
+ .cfi_endproc
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+rustix_syscall2_nr_last:
+ .cfi_startproc
+ mov w8, w2
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+rustix_syscall3_nr_last:
+ .cfi_startproc
+ mov w8, w3
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+rustix_syscall4_nr_last:
+ .cfi_startproc
+ mov w8, w4
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+rustix_syscall5_nr_last:
+ .cfi_startproc
+ mov w8, w5
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+rustix_syscall6_nr_last:
+ .cfi_startproc
+ mov w8, w6
+ svc #0
+ ret
+ .cfi_endproc
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/arm.s b/vendor/rustix/src/imp/linux_raw/arch/outline/arm.s
new file mode 100644
index 000000000..7001686f1
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/arm.s
@@ -0,0 +1,135 @@
+// Assembly code for making arm syscalls.
+//
+// arm syscall argument register ordering is similar to the arm
+// userspace argument register ordering except that the syscall number
+// (nr) is passed in r7.
+//
+// nr_last.rs takes care of reordering the nr argument to the end for us,
+// so we only need to move nr into r7 and take care of r4 and r5 if needed.
+
+ .file "arm.s"
+ .arch armv5t
+
+ .section .text.rustix_syscall0_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, %function
+rustix_syscall0_nr_last:
+ .fnstart
+ .cantunwind
+ push {r7, lr}
+ mov r7, r0
+ svc #0
+ pop {r7, pc}
+ .fnend
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, %function
+rustix_syscall1_nr_last:
+ .fnstart
+ .cantunwind
+ push {r7, lr}
+ mov r7, r1
+ svc #0
+ pop {r7, pc}
+ .fnend
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, %function
+rustix_syscall1_noreturn_nr_last:
+ .fnstart
+ .cantunwind
+ // Don't save r7 and lr; this is noreturn, so we'll never restore them.
+ mov r7, r1
+ svc #0
+ udf #16 // Trap instruction
+ .fnend
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, %function
+rustix_syscall2_nr_last:
+ .fnstart
+ .cantunwind
+ push {r7, lr}
+ mov r7, r2
+ svc #0
+ pop {r7, pc}
+ .fnend
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, %function
+rustix_syscall3_nr_last:
+ .fnstart
+ .cantunwind
+ push {r7, lr}
+ mov r7, r3
+ svc #0
+ pop {r7, pc}
+ .fnend
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, %function
+rustix_syscall4_nr_last:
+ .fnstart
+ .cantunwind
+ push {r7, lr}
+ ldr r7, [sp, #8]
+ svc #0
+ pop {r7, pc}
+ .fnend
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, %function
+rustix_syscall5_nr_last:
+ .fnstart
+ .cantunwind
+ push {r4, r7, r11, lr}
+ ldr r7, [sp, #20]
+ ldr r4, [sp, #16]
+ svc #0
+ pop {r4, r7, r11, pc}
+ .fnend
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",%progbits
+ .p2align 4
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, %function
+rustix_syscall6_nr_last:
+ .fnstart
+ .cantunwind
+ push {r4, r5, r7, lr}
+ add r7, sp, #16
+ ldm r7, {r4, r5, r7}
+ svc #0
+ pop {r4, r5, r7, pc}
+ .fnend
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",%progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/mips.s b/vendor/rustix/src/imp/linux_raw/arch/outline/mips.s
new file mode 100644
index 000000000..ab1bbfa2d
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/mips.s
@@ -0,0 +1,213 @@
+# Assembly code for making 32-bit mips syscalls.
+#
+# mips syscall argument register ordering is the same as the mips
+# userspace argument register ordering except that the syscall number
+# (nr) is passed in v0.
+#
+# nr_last.rs takes care of reordering the nr argument to the end for us,
+# so we only need to move nr into v0.
+
+ .file "mips.s"
+ .section .mdebug.abi32
+ .previous
+ .abicalls
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall0_nr_last
+rustix_syscall0_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $4
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall0_nr_last
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall1_nr_last
+rustix_syscall1_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $5
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall1_nr_last
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall1_noreturn_nr_last
+rustix_syscall1_noreturn_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $5
+ syscall
+ teq $zero, $zero
+ .end rustix_syscall1_noreturn_nr_last
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall2_nr_last
+rustix_syscall2_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $6
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall2_nr_last
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall3_nr_last
+rustix_syscall3_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $7
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall3_nr_last
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall4_nr_last
+rustix_syscall4_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ lw $2, 16($sp)
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall4_nr_last
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall5_nr_last
+rustix_syscall5_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ lw $2, 20($sp)
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall5_nr_last
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall6_nr_last
+rustix_syscall6_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ lw $2, 24($sp)
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall6_nr_last
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits
+
+ .section .text.rustix_syscall7_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall7_nr_last
+ .hidden rustix_syscall7_nr_last
+ .type rustix_syscall7_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall7_nr_last
+rustix_syscall7_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ lw $2, 28($sp)
+ syscall
+ negu $8, $2
+ jr $31
+ movn $2, $8, $7
+ .end rustix_syscall7_nr_last
+ .size rustix_syscall7_nr_last, .-rustix_syscall7_nr_last
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/mips64.s b/vendor/rustix/src/imp/linux_raw/arch/outline/mips64.s
new file mode 100644
index 000000000..3c5e76e36
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/mips64.s
@@ -0,0 +1,189 @@
+# Assembly code for making mips64 syscalls.
+#
+# mips64 syscall argument register ordering is the same as the mips64
+# userspace argument register ordering except that the syscall number
+# (nr) is passed in v0.
+#
+# nr_last.rs takes care of reordering the nr argument to the end for us,
+# so we only need to move nr into v0.
+
+ .file "mips.s"
+ .section .mdebug.abi64
+ .previous
+ .abicalls
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall0_nr_last
+rustix_syscall0_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $4
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall0_nr_last
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall1_nr_last
+rustix_syscall1_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $5
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall1_nr_last
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall1_noreturn_nr_last
+rustix_syscall1_noreturn_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $5
+ syscall
+ teq $0, $0
+ .end rustix_syscall1_noreturn_nr_last
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall2_nr_last
+rustix_syscall2_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $6
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall2_nr_last
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall3_nr_last
+rustix_syscall3_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $7
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall3_nr_last
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall4_nr_last
+rustix_syscall4_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $8
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall4_nr_last
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall5_nr_last
+rustix_syscall5_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $9
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall5_nr_last
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+ .set nomips16
+ .set nomicromips
+ .ent rustix_syscall6_nr_last
+rustix_syscall6_nr_last:
+ .frame $sp,0,$31
+ .mask 0x00000000,0
+ .fmask 0x00000000,0
+ .set noreorder
+ .set nomacro
+ move $2, $10
+ syscall
+ dnegu $12, $2
+ jr $31
+ movn $2, $12, $7
+ .end rustix_syscall6_nr_last
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/mod.rs b/vendor/rustix/src/imp/linux_raw/arch/outline/mod.rs
new file mode 100644
index 000000000..ce1352751
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/mod.rs
@@ -0,0 +1,33 @@
+//! Declare functions defined in out-of-line ("outline") asm files.
+//!
+//! Kernel calling conventions differ from userspace calling conventions,
+//! so we also define inline function wrappers which reorder the arguments
+//! so that they match with the kernel convention as closely as possible,
+//! to minimize the amount of out-of-line code we need.
+
+#[cfg(target_arch = "x86")]
+mod x86;
+// For these architectures, pass the `nr` argument last.
+#[cfg(any(
+ target_arch = "arm",
+ target_arch = "aarch64",
+ target_arch = "mips",
+ target_arch = "mips64",
+ target_arch = "powerpc64",
+ target_arch = "riscv64",
+ target_arch = "x86_64",
+))]
+mod nr_last;
+
+#[cfg(any(
+ target_arch = "arm",
+ target_arch = "aarch64",
+ target_arch = "mips",
+ target_arch = "mips64",
+ target_arch = "powerpc64",
+ target_arch = "riscv64",
+ target_arch = "x86_64",
+))]
+pub(in crate::imp) use nr_last::*;
+#[cfg(target_arch = "x86")]
+pub(in crate::imp) use x86::*;
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/nr_last.rs b/vendor/rustix/src/imp/linux_raw/arch/outline/nr_last.rs
new file mode 100644
index 000000000..fdcd11021
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/nr_last.rs
@@ -0,0 +1,166 @@
+//! Syscall wrappers for platforms which pass the syscall number specially.
+//!
+//! Rustix aims to minimize the amount of assembly code it needs. To that end,
+//! this code reorders syscall arguments as close as feasible to the actual
+//! syscall convention before calling the assembly functions.
+//!
+//! Many architectures use a convention where the syscall number is passed in a
+//! special register, with the regular syscall arguments passed in either the
+//! same or similar registers as the platform C convention. This code
+//! approximates that order by passing the regular syscall arguments first, and
+//! the syscall number last. That way, the outline assembly code typically just
+//! needs to move the syscall number to its special register, and leave the
+//! other arguments mostly as they are.
+
+#[cfg(target_arch = "mips")]
+use crate::imp::reg::A6;
+use crate::imp::reg::{ArgReg, RetReg, SyscallNumber, A0, A1, A2, A3, A4, A5, R0};
+
+// First we declare the actual assembly routines with `*_nr_last` names and
+// reordered arguments. If the signatures or calling conventions are ever
+// changed, the symbol names should also be updated accordingly, to avoid
+// collisions with other versions of this crate.
+//
+// We don't define `_readonly` versions of these because we have no way to tell
+// Rust that calls to our outline assembly are readonly.
+extern "C" {
+ fn rustix_syscall0_nr_last(nr: SyscallNumber<'_>) -> RetReg<R0>;
+ fn rustix_syscall1_nr_last(a0: ArgReg<'_, A0>, nr: SyscallNumber<'_>) -> RetReg<R0>;
+ fn rustix_syscall1_noreturn_nr_last(a0: ArgReg<'_, A0>, nr: SyscallNumber<'_>) -> !;
+ fn rustix_syscall2_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall3_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall4_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall5_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall6_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ #[cfg(target_arch = "mips")]
+ fn rustix_syscall7_nr_last(
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+ a6: ArgReg<'_, A6>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+}
+
+// Then we define inline wrapper functions that do the reordering.
+
+#[inline]
+pub(in crate::imp) unsafe fn syscall0(nr: SyscallNumber<'_>) -> RetReg<R0> {
+ rustix_syscall0_nr_last(nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall1(nr: SyscallNumber<'_>, a0: ArgReg<'_, A0>) -> RetReg<R0> {
+ rustix_syscall1_nr_last(a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall1_noreturn(nr: SyscallNumber<'_>, a0: ArgReg<'_, A0>) -> ! {
+ rustix_syscall1_noreturn_nr_last(a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall2(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+) -> RetReg<R0> {
+ rustix_syscall2_nr_last(a0, a1, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall3(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+) -> RetReg<R0> {
+ rustix_syscall3_nr_last(a0, a1, a2, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall4(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+) -> RetReg<R0> {
+ rustix_syscall4_nr_last(a0, a1, a2, a3, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall5(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+) -> RetReg<R0> {
+ rustix_syscall5_nr_last(a0, a1, a2, a3, a4, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall6(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+) -> RetReg<R0> {
+ rustix_syscall6_nr_last(a0, a1, a2, a3, a4, a5, nr)
+}
+#[cfg(target_arch = "mips")]
+#[inline]
+pub(in crate::imp) unsafe fn syscall7(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+ a6: ArgReg<'_, A6>,
+) -> RetReg<R0> {
+ rustix_syscall7_nr_last(a0, a1, a2, a3, a4, a5, a6, nr)
+}
+
+// Then we define the `_readonly` versions of the wrappers. We don't have
+// separate `_readonly` implementations, so these can just be aliases to
+// their non-`_readonly` counterparts.
+#[cfg(target_arch = "mips")]
+pub(in crate::imp) use syscall7 as syscall7_readonly;
+pub(in crate::imp) use {
+ syscall0 as syscall0_readonly, syscall1 as syscall1_readonly, syscall2 as syscall2_readonly,
+ syscall3 as syscall3_readonly, syscall4 as syscall4_readonly, syscall5 as syscall5_readonly,
+ syscall6 as syscall6_readonly,
+};
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/powerpc64.s b/vendor/rustix/src/imp/linux_raw/arch/outline/powerpc64.s
new file mode 100644
index 000000000..29d4c0a95
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/powerpc64.s
@@ -0,0 +1,132 @@
+# Assembly code for making powerpc64le syscalls.
+#
+# powerpc64le syscall argument register ordering is the same as the
+# powerpc64le userspace argument register ordering except that the syscall
+# number (nr) is passed in r0.
+#
+# nr_last.rs takes care of reordering the nr argument to the end for us,
+# so we only need to move nr into r0.
+
+ .file "powerpc64le.s"
+ .machine power8
+ .abiversion 2
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+rustix_syscall0_nr_last:
+ .cfi_startproc
+ mr 0, 3
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+rustix_syscall1_nr_last:
+ .cfi_startproc
+ mr 0, 4
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+rustix_syscall1_noreturn_nr_last:
+ .cfi_startproc
+ mr 0, 4
+ sc
+ trap
+ .cfi_endproc
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+rustix_syscall2_nr_last:
+ .cfi_startproc
+ mr 0, 5
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+rustix_syscall3_nr_last:
+ .cfi_startproc
+ mr 0, 6
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+rustix_syscall4_nr_last:
+ .cfi_startproc
+ mr 0, 7
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+rustix_syscall5_nr_last:
+ .cfi_startproc
+ mr 0, 8
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 2
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+rustix_syscall6_nr_last:
+ .cfi_startproc
+ mr 0, 9
+ sc
+ bnslr
+ neg 3, 3
+ blr
+ .cfi_endproc
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/riscv64.s b/vendor/rustix/src/imp/linux_raw/arch/outline/riscv64.s
new file mode 100644
index 000000000..28d692f7c
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/riscv64.s
@@ -0,0 +1,116 @@
+# Assembly code for making riscv64 syscalls.
+#
+# riscv64 syscall argument register ordering is the same as the riscv64
+# userspace argument register ordering except that the syscall number
+# (nr) is passed in a7.
+#
+# nr_last.rs takes care of reordering the nr argument to the end for us,
+# so we only need to move nr into a7.
+
+ .file "riscv64.s"
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+rustix_syscall0_nr_last:
+ .cfi_startproc
+ mv a7, a0
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+rustix_syscall1_nr_last:
+ .cfi_startproc
+ mv a7, a1
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+rustix_syscall1_noreturn_nr_last:
+ .cfi_startproc
+ mv a7, a1
+ ecall
+ unimp
+ .cfi_endproc
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+rustix_syscall2_nr_last:
+ .cfi_startproc
+ mv a7, a2
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+rustix_syscall3_nr_last:
+ .cfi_startproc
+ mv a7, a3
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+rustix_syscall4_nr_last:
+ .cfi_startproc
+ mv a7, a4
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+rustix_syscall5_nr_last:
+ .cfi_startproc
+ mv a7, a5
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+rustix_syscall6_nr_last:
+ .cfi_startproc
+ mv a7, a6
+ ecall
+ ret
+ .cfi_endproc
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/x86.rs b/vendor/rustix/src/imp/linux_raw/arch/outline/x86.rs
new file mode 100644
index 000000000..938a4a09d
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/x86.rs
@@ -0,0 +1,285 @@
+//! Syscall wrappers for 32-bit x86.
+//!
+//! This module is similar to the `nr_last` module, except specialized for
+//! 32-bit x86.
+//!
+//! The syscall convention passes all arguments in registers. The closest we
+//! can easily get to that from Rust is to use the fastcall convention which
+//! passes the first two arguments in `ecx` and `edx`, which are the second
+//! and third Linux syscall arguments. To line them up, this function passes
+//! the second and third syscall argument as the first and second argument to
+//! the outline assembly, followed by the first syscall argument, and then the
+//! rest of the syscall arguments. The assembly code still has to do some work,
+//! but at least we can get up to two arguments into the right place for it.
+
+#![allow(dead_code, unused_imports)]
+
+use crate::imp::reg::{ArgReg, RetReg, SyscallNumber, A0, A1, A2, A3, A4, A5, R0};
+use crate::imp::vdso_wrappers::SyscallType;
+
+// First we declare the actual assembly routines with `*_nr_last_fastcall`
+// names and reordered arguments. If the signatures or calling conventions are
+// ever changed, the symbol names should also be updated accordingly, to avoid
+// collisions with other versions of this crate.
+//
+// We don't define `_readonly` versions of these because we have no way to tell
+// Rust that calls to our outline assembly are readonly.
+extern "fastcall" {
+ fn rustix_syscall0_nr_last_fastcall(nr: SyscallNumber<'_>) -> RetReg<R0>;
+ fn rustix_syscall1_nr_last_fastcall(a0: ArgReg<'_, A0>, nr: SyscallNumber<'_>) -> RetReg<R0>;
+ fn rustix_syscall1_noreturn_nr_last_fastcall(a0: ArgReg<'_, A0>, nr: SyscallNumber<'_>) -> !;
+ fn rustix_syscall2_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall3_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall4_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall5_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+ fn rustix_syscall6_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+ nr: SyscallNumber<'_>,
+ ) -> RetReg<R0>;
+}
+
+// Then we define inline wrapper functions that do the reordering.
+
+#[inline]
+pub(in crate::imp) unsafe fn syscall0(nr: SyscallNumber<'_>) -> RetReg<R0> {
+ rustix_syscall0_nr_last_fastcall(nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall1(nr: SyscallNumber<'_>, a0: ArgReg<'_, A0>) -> RetReg<R0> {
+ rustix_syscall1_nr_last_fastcall(a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall1_noreturn(nr: SyscallNumber<'_>, a0: ArgReg<'_, A0>) -> ! {
+ rustix_syscall1_noreturn_nr_last_fastcall(a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall2(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+) -> RetReg<R0> {
+ rustix_syscall2_nr_last_fastcall(a1, a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall3(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+) -> RetReg<R0> {
+ rustix_syscall3_nr_last_fastcall(a1, a2, a0, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall4(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+) -> RetReg<R0> {
+ rustix_syscall4_nr_last_fastcall(a1, a2, a0, a3, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall5(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+) -> RetReg<R0> {
+ rustix_syscall5_nr_last_fastcall(a1, a2, a0, a3, a4, nr)
+}
+#[inline]
+pub(in crate::imp) unsafe fn syscall6(
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+) -> RetReg<R0> {
+ rustix_syscall6_nr_last_fastcall(a1, a2, a0, a3, a4, a5, nr)
+}
+
+// Then we define the `_readonly` versions of the wrappers. We don't have
+// separate `_readonly` implementations, so these can just be aliases to
+// their non-`_readonly` counterparts.
+pub(in crate::imp) use {
+ syscall0 as syscall0_readonly, syscall1 as syscall1_readonly, syscall2 as syscall2_readonly,
+ syscall3 as syscall3_readonly, syscall4 as syscall4_readonly, syscall5 as syscall5_readonly,
+ syscall6 as syscall6_readonly,
+};
+
+// x86 prefers to route all syscalls through the vDSO, though this isn't
+// always possible, so it also has a special form for doing the dispatch.
+//
+// First we declare the actual assembly routines with `*_nr_last_fastcall`
+// names and reordered arguments. If the signatures or calling conventions are
+// ever changed, the symbol names should also be updated accordingly, to avoid
+// collisions with other versions of this crate.
+extern "fastcall" {
+ fn rustix_indirect_syscall0_nr_last_fastcall(
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall1_nr_last_fastcall(
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall1_noreturn_nr_last_fastcall(
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> !;
+ fn rustix_indirect_syscall2_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall3_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall4_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall5_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+ fn rustix_indirect_syscall6_nr_last_fastcall(
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a0: ArgReg<'_, A0>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+ nr: SyscallNumber<'_>,
+ callee: SyscallType,
+ ) -> RetReg<R0>;
+}
+
+// Then we define inline wrapper functions that do the reordering.
+
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall0(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall0_nr_last_fastcall(nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall1(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall1_nr_last_fastcall(a0, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall1_noreturn(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+) -> ! {
+ rustix_indirect_syscall1_noreturn_nr_last_fastcall(a0, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall2(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall2_nr_last_fastcall(a1, a0, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall3(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall3_nr_last_fastcall(a1, a2, a0, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall4(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall4_nr_last_fastcall(a1, a2, a0, a3, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall5(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall5_nr_last_fastcall(a1, a2, a0, a3, a4, nr, callee)
+}
+#[inline]
+pub(in crate::imp) unsafe fn indirect_syscall6(
+ callee: SyscallType,
+ nr: SyscallNumber<'_>,
+ a0: ArgReg<'_, A0>,
+ a1: ArgReg<'_, A1>,
+ a2: ArgReg<'_, A2>,
+ a3: ArgReg<'_, A3>,
+ a4: ArgReg<'_, A4>,
+ a5: ArgReg<'_, A5>,
+) -> RetReg<R0> {
+ rustix_indirect_syscall6_nr_last_fastcall(a1, a2, a0, a3, a4, a5, nr, callee)
+}
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/x86.s b/vendor/rustix/src/imp/linux_raw/arch/outline/x86.s
new file mode 100644
index 000000000..bda234e1a
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/x86.s
@@ -0,0 +1,381 @@
+// Assembly code for making x86 syscalls.
+//
+// On x86 we use the "fastcall" convention which passes the first two
+// arguments in ecx and edx. outline.rs reorders the arguments to put
+// a1 and a2 in those registers so that we don't have to move them to
+// set up the kernel convention.
+//
+// "fastcall" expects callee to pop argument stack space, so we use
+// `ret imm` instructions to clean up the stack. We don't need callee
+// cleanup per se, it just comes along with using "fastcall".
+
+ .file "x86.s"
+ .intel_syntax noprefix
+
+ .section .text.rustix_indirect_syscall0_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall0_nr_last_fastcall
+ .hidden rustix_indirect_syscall0_nr_last_fastcall
+ .type rustix_indirect_syscall0_nr_last_fastcall, @function
+rustix_indirect_syscall0_nr_last_fastcall:
+ .cfi_startproc
+ mov eax,ecx
+ call edx
+ ret
+ .cfi_endproc
+ .size rustix_indirect_syscall0_nr_last_fastcall, .-rustix_indirect_syscall0_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall1_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall1_nr_last_fastcall
+ .hidden rustix_indirect_syscall1_nr_last_fastcall
+ .type rustix_indirect_syscall1_nr_last_fastcall, @function
+rustix_indirect_syscall1_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov ebx,ecx
+ mov eax,edx
+ call DWORD PTR [esp+0x8]
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x4
+ .cfi_endproc
+ .size rustix_indirect_syscall1_nr_last_fastcall, .-rustix_indirect_syscall1_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall1_noreturn_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall1_noreturn_nr_last_fastcall
+ .hidden rustix_indirect_syscall1_noreturn_nr_last_fastcall
+ .type rustix_indirect_syscall1_noreturn_nr_last_fastcall, @function
+rustix_indirect_syscall1_noreturn_nr_last_fastcall:
+ .cfi_startproc
+ mov ebx,ecx
+ mov eax,edx
+ call DWORD PTR [esp+0x4]
+ ud2
+ .cfi_endproc
+ .size rustix_indirect_syscall1_noreturn_nr_last_fastcall, .-rustix_indirect_syscall1_noreturn_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall2_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall2_nr_last_fastcall
+ .hidden rustix_indirect_syscall2_nr_last_fastcall
+ .type rustix_indirect_syscall2_nr_last_fastcall, @function
+rustix_indirect_syscall2_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov ebx,edx
+ mov eax,DWORD PTR [esp+0x8]
+ call DWORD PTR [esp+0xc]
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x8
+ .cfi_endproc
+ .size rustix_indirect_syscall2_nr_last_fastcall, .-rustix_indirect_syscall2_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall3_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall3_nr_last_fastcall
+ .hidden rustix_indirect_syscall3_nr_last_fastcall
+ .type rustix_indirect_syscall3_nr_last_fastcall, @function
+rustix_indirect_syscall3_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0x8]
+ mov eax,DWORD PTR [esp+0xc]
+ call DWORD PTR [esp+0x10]
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0xc
+ .cfi_endproc
+ .size rustix_indirect_syscall3_nr_last_fastcall, .-rustix_indirect_syscall3_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall4_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall4_nr_last_fastcall
+ .hidden rustix_indirect_syscall4_nr_last_fastcall
+ .type rustix_indirect_syscall4_nr_last_fastcall, @function
+rustix_indirect_syscall4_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ push esi
+ .cfi_def_cfa_offset 12
+ .cfi_offset esi, -12
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0xc]
+ mov esi,DWORD PTR [esp+0x10]
+ mov eax,DWORD PTR [esp+0x14]
+ call DWORD PTR [esp+0x18]
+ pop esi
+ .cfi_def_cfa_offset 8
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x10
+ .cfi_endproc
+ .size rustix_indirect_syscall4_nr_last_fastcall, .-rustix_indirect_syscall4_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall5_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall5_nr_last_fastcall
+ .hidden rustix_indirect_syscall5_nr_last_fastcall
+ .type rustix_indirect_syscall5_nr_last_fastcall, @function
+rustix_indirect_syscall5_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ push esi
+ .cfi_def_cfa_offset 12
+ push edi
+ .cfi_def_cfa_offset 16
+ .cfi_offset edi, -16
+ .cfi_offset esi, -12
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0x10]
+ mov esi,DWORD PTR [esp+0x14]
+ mov edi,DWORD PTR [esp+0x18]
+ mov eax,DWORD PTR [esp+0x1c]
+ call DWORD PTR [esp+0x20]
+ pop edi
+ .cfi_def_cfa_offset 12
+ pop esi
+ .cfi_def_cfa_offset 8
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x14
+ .cfi_endproc
+ .size rustix_indirect_syscall5_nr_last_fastcall, .-rustix_indirect_syscall5_nr_last_fastcall
+
+ .section .text.rustix_indirect_syscall6_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_indirect_syscall6_nr_last_fastcall
+ .hidden rustix_indirect_syscall6_nr_last_fastcall
+ .type rustix_indirect_syscall6_nr_last_fastcall, @function
+rustix_indirect_syscall6_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ push esi
+ .cfi_def_cfa_offset 12
+ push edi
+ .cfi_def_cfa_offset 16
+ push ebp
+ .cfi_def_cfa_offset 20
+ .cfi_offset ebp, -20
+ .cfi_offset edi, -16
+ .cfi_offset esi, -12
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0x14]
+ mov esi,DWORD PTR [esp+0x18]
+ mov edi,DWORD PTR [esp+0x1c]
+ mov ebp,DWORD PTR [esp+0x20]
+ mov eax,DWORD PTR [esp+0x24]
+ call DWORD PTR [esp+0x28]
+ pop ebp
+ .cfi_def_cfa_offset 16
+ pop edi
+ .cfi_def_cfa_offset 12
+ pop esi
+ .cfi_def_cfa_offset 8
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x18
+ .cfi_endproc
+ .size rustix_indirect_syscall6_nr_last_fastcall, .-rustix_indirect_syscall6_nr_last_fastcall
+
+ .section .text.rustix_syscall0_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall0_nr_last_fastcall
+ .hidden rustix_syscall0_nr_last_fastcall
+ .type rustix_syscall0_nr_last_fastcall, @function
+rustix_syscall0_nr_last_fastcall:
+ .cfi_startproc
+ mov eax,ecx
+ int 0x80
+ ret
+ .cfi_endproc
+ .size rustix_syscall0_nr_last_fastcall, .-rustix_syscall0_nr_last_fastcall
+
+ .section .text.rustix_syscall1_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_nr_last_fastcall
+ .hidden rustix_syscall1_nr_last_fastcall
+ .type rustix_syscall1_nr_last_fastcall, @function
+rustix_syscall1_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov eax,edx
+ mov ebx,ecx
+ int 0x80
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret
+ .cfi_endproc
+ .size rustix_syscall1_nr_last_fastcall, .-rustix_syscall1_nr_last_fastcall
+
+ .section .text.rustix_syscall1_noreturn_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_noreturn_nr_last_fastcall
+ .hidden rustix_syscall1_noreturn_nr_last_fastcall
+ .type rustix_syscall1_noreturn_nr_last_fastcall, @function
+rustix_syscall1_noreturn_nr_last_fastcall:
+ .cfi_startproc
+ mov eax,edx
+ mov ebx,ecx
+ int 0x80
+ ud2
+ .cfi_endproc
+ .size rustix_syscall1_noreturn_nr_last_fastcall, .-rustix_syscall1_noreturn_nr_last_fastcall
+
+ .section .text.rustix_syscall2_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall2_nr_last_fastcall
+ .hidden rustix_syscall2_nr_last_fastcall
+ .type rustix_syscall2_nr_last_fastcall, @function
+rustix_syscall2_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov ebx,edx
+ mov eax,DWORD PTR [esp+0x8]
+ int 0x80
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x4
+ .cfi_endproc
+ .size rustix_syscall2_nr_last_fastcall, .-rustix_syscall2_nr_last_fastcall
+
+ .section .text.rustix_syscall3_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall3_nr_last_fastcall
+ .hidden rustix_syscall3_nr_last_fastcall
+ .type rustix_syscall3_nr_last_fastcall, @function
+rustix_syscall3_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0x8]
+ mov eax,DWORD PTR [esp+0xc]
+ int 0x80
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x8
+ .cfi_endproc
+ .size rustix_syscall3_nr_last_fastcall, .-rustix_syscall3_nr_last_fastcall
+
+ .section .text.rustix_syscall4_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall4_nr_last_fastcall
+ .hidden rustix_syscall4_nr_last_fastcall
+ .type rustix_syscall4_nr_last_fastcall, @function
+rustix_syscall4_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ push esi
+ .cfi_def_cfa_offset 12
+ .cfi_offset esi, -12
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0xc]
+ mov esi,DWORD PTR [esp+0x10]
+ mov eax,DWORD PTR [esp+0x14]
+ int 0x80
+ pop esi
+ .cfi_def_cfa_offset 8
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0xc
+ .cfi_endproc
+ .size rustix_syscall4_nr_last_fastcall, .-rustix_syscall4_nr_last_fastcall
+
+ .section .text.rustix_syscall5_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall5_nr_last_fastcall
+ .hidden rustix_syscall5_nr_last_fastcall
+ .type rustix_syscall5_nr_last_fastcall, @function
+rustix_syscall5_nr_last_fastcall:
+ .cfi_startproc
+ push ebx
+ .cfi_def_cfa_offset 8
+ push edi
+ .cfi_def_cfa_offset 12
+ push esi
+ .cfi_def_cfa_offset 16
+ .cfi_offset esi, -16
+ .cfi_offset edi, -12
+ .cfi_offset ebx, -8
+ mov ebx,DWORD PTR [esp+0x10]
+ mov esi,DWORD PTR [esp+0x14]
+ mov edi,DWORD PTR [esp+0x18]
+ mov eax,DWORD PTR [esp+0x1c]
+ int 0x80
+ pop esi
+ .cfi_def_cfa_offset 12
+ pop edi
+ .cfi_def_cfa_offset 8
+ pop ebx
+ .cfi_def_cfa_offset 4
+ ret 0x10
+ .cfi_endproc
+ .size rustix_syscall5_nr_last_fastcall, .-rustix_syscall5_nr_last_fastcall
+
+ .section .text.rustix_syscall6_nr_last_fastcall,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall6_nr_last_fastcall
+ .hidden rustix_syscall6_nr_last_fastcall
+ .type rustix_syscall6_nr_last_fastcall, @function
+rustix_syscall6_nr_last_fastcall:
+ .cfi_startproc
+ push ebp
+ .cfi_def_cfa_offset 8
+ push ebx
+ .cfi_def_cfa_offset 12
+ push edi
+ .cfi_def_cfa_offset 16
+ push esi
+ .cfi_def_cfa_offset 20
+ .cfi_offset esi, -20
+ .cfi_offset edi, -16
+ .cfi_offset ebx, -12
+ .cfi_offset ebp, -8
+ mov ebx,DWORD PTR [esp+0x14]
+ mov esi,DWORD PTR [esp+0x18]
+ mov edi,DWORD PTR [esp+0x1c]
+ mov ebp,DWORD PTR [esp+0x20]
+ mov eax,DWORD PTR [esp+0x24]
+ int 0x80
+ pop esi
+ .cfi_def_cfa_offset 16
+ pop edi
+ .cfi_def_cfa_offset 12
+ pop ebx
+ .cfi_def_cfa_offset 8
+ pop ebp
+ .cfi_def_cfa_offset 4
+ ret 0x14
+ .cfi_endproc
+ .size rustix_syscall6_nr_last_fastcall, .-rustix_syscall6_nr_last_fastcall
+
+ .section .text.rustix_int_0x80,"ax",@progbits
+ .p2align 4
+ .weak rustix_int_0x80
+ .hidden rustix_int_0x80
+ .type rustix_int_0x80, @function
+rustix_int_0x80:
+ .cfi_startproc
+ int 0x80
+ ret
+ .cfi_endproc
+ .size rustix_int_0x80, .-rustix_int_0x80
+
+ .section .note.GNU-stack,"",@progbits
diff --git a/vendor/rustix/src/imp/linux_raw/arch/outline/x86_64.s b/vendor/rustix/src/imp/linux_raw/arch/outline/x86_64.s
new file mode 100644
index 000000000..2beda323b
--- /dev/null
+++ b/vendor/rustix/src/imp/linux_raw/arch/outline/x86_64.s
@@ -0,0 +1,122 @@
+// Assembly code for making x86-64 syscalls.
+//
+// x86-64 syscall argument register ordering is the same as the x86-64
+// userspace argument register ordering except that a3 is passed in r10
+// instead of rcx, and the syscall number (nr) is passed in eax.
+//
+// outline.rs takes care of reordering the nr argument to the end for us,
+// so we only need to move nr into eax and move rcx into r10 as needed.
+//
+// x32 is not yet supported.
+
+ .file "x86_64.s"
+ .intel_syntax noprefix
+
+ .section .text.rustix_syscall0_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall0_nr_last
+ .hidden rustix_syscall0_nr_last
+ .type rustix_syscall0_nr_last, @function
+rustix_syscall0_nr_last:
+ .cfi_startproc
+ mov eax,edi
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall0_nr_last, .-rustix_syscall0_nr_last
+
+ .section .text.rustix_syscall1_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_nr_last
+ .hidden rustix_syscall1_nr_last
+ .type rustix_syscall1_nr_last, @function
+rustix_syscall1_nr_last:
+ .cfi_startproc
+ mov eax,esi
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall1_nr_last, .-rustix_syscall1_nr_last
+
+ .section .text.rustix_syscall1_noreturn_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall1_noreturn_nr_last
+ .hidden rustix_syscall1_noreturn_nr_last
+ .type rustix_syscall1_noreturn_nr_last, @function
+rustix_syscall1_noreturn_nr_last:
+ .cfi_startproc
+ mov eax,esi
+ syscall
+ ud2
+ .cfi_endproc
+ .size rustix_syscall1_noreturn_nr_last, .-rustix_syscall1_noreturn_nr_last
+
+ .section .text.rustix_syscall2_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall2_nr_last
+ .hidden rustix_syscall2_nr_last
+ .type rustix_syscall2_nr_last, @function
+rustix_syscall2_nr_last:
+ .cfi_startproc
+ mov eax,edx
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall2_nr_last, .-rustix_syscall2_nr_last
+
+ .section .text.rustix_syscall3_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall3_nr_last
+ .hidden rustix_syscall3_nr_last
+ .type rustix_syscall3_nr_last, @function
+rustix_syscall3_nr_last:
+ .cfi_startproc
+ mov eax,ecx
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall3_nr_last, .-rustix_syscall3_nr_last
+
+ .section .text.rustix_syscall4_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall4_nr_last
+ .hidden rustix_syscall4_nr_last
+ .type rustix_syscall4_nr_last, @function
+rustix_syscall4_nr_last:
+ .cfi_startproc
+ mov eax,r8d
+ mov r10,rcx
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall4_nr_last, .-rustix_syscall4_nr_last
+
+ .section .text.rustix_syscall5_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall5_nr_last
+ .hidden rustix_syscall5_nr_last
+ .type rustix_syscall5_nr_last, @function
+rustix_syscall5_nr_last:
+ .cfi_startproc
+ mov eax,r9d
+ mov r10,rcx
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall5_nr_last, .-rustix_syscall5_nr_last
+
+ .section .text.rustix_syscall6_nr_last,"ax",@progbits
+ .p2align 4
+ .weak rustix_syscall6_nr_last
+ .hidden rustix_syscall6_nr_last
+ .type rustix_syscall6_nr_last, @function
+rustix_syscall6_nr_last:
+ .cfi_startproc
+ mov eax,DWORD PTR [rsp+0x8]
+ mov r10,rcx
+ syscall
+ ret
+ .cfi_endproc
+ .size rustix_syscall6_nr_last, .-rustix_syscall6_nr_last
+
+ .section .note.GNU-stack,"",@progbits