/* libc-top-half/musl/arch/mips/syscall_arch.h */
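/*
 * Inline syscall wrappers and arch-specific syscall definitions for
 * 32-bit MIPS (o32), from the bundled musl under libc-top-half.
 */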
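/*
 * __SYSCALL_LL_E expands a 64-bit argument into its two 32-bit halves in
 * native (endian-dependent) memory order, so the value occupies two
 * consecutive syscall argument slots. __SYSCALL_LL_O additionally inserts
 * a zero padding word first, for calls where the o32 ABI requires the
 * 64-bit argument to start in an even register pair.
 */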
#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))

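/*
 * The legacy 32-bit mips kernel ABI uses 0x7fffffff (-1UL/2) rather than
 * -1 as RLIM_INFINITY; the getrlimit/setrlimit code uses this value to
 * translate to and from musl's RLIM_INFINITY.
 */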
#define SYSCALL_RLIM_INFINITY (-1UL/2)

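/*
 * Registers the kernel may clobber across a syscall. The hi/lo multiply
 * and divide registers were removed in MIPS Release 6, so they are only
 * listed as clobbered on pre-r6 ISAs.
 */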
#if __mips_isa_rev >= 6
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "memory"
#else
#define SYSCALL_CLOBBERLIST \
	"$1", "$3", "$11", "$12", "$13", \
	"$14", "$15", "$24", "$25", "hi", "lo", "memory"
#endif

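/*
 * o32 syscall convention: the syscall number is loaded into $v0 ($2)
 * immediately before the syscall instruction, arguments go in $a0-$a3
 * ($4-$7), the result comes back in $v0, and $a3 is an error flag. On
 * error, $v0 holds a positive errno, which is negated here to give the
 * -errno convention used throughout musl. Registers $8-$10 are clobbered
 * explicitly whenever they are not bound as argument inputs below.
 */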
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

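/*
 * From four arguments on, d occupies $a3 ($7), which doubles as the error
 * flag on return, hence the read-write "+r"(r7) constraint.
 */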
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "+r"(r7)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$8", "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

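/*
 * The o32 ABI passes the fifth and later arguments on the stack, starting
 * at 16($sp) just above the 16-byte register-argument save area. The asm
 * below reserves 32 bytes, stores the extra arguments there, issues the
 * syscall, then restores the stack pointer.
 */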
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; "
		"addu $2,$0,%3 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$9", "$10");
	return r7 && r2>0 ? -r2 : r2;
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; "
		"addu $2,$0,%4 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8), "+r"(r9)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST, "$10");
	return r7 && r2>0 ? -r2 : r2;
}

static inline long __syscall7(long n, long a, long b, long c, long d, long e, long f, long g)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r8 __asm__("$8") = e;
	register long r9 __asm__("$9") = f;
	register long r10 __asm__("$10") = g;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"subu $sp,$sp,32 ; sw $8,16($sp) ; sw $9,20($sp) ; sw $10,24($sp) ; "
		"addu $2,$0,%5 ; syscall ;"
		"addu $sp,$sp,32"
		: "=&r"(r2), "+r"(r7), "+r"(r8), "+r"(r9), "+r"(r10)
		: "ir"(n), "0"(r2), "r"(r4), "r"(r5), "r"(r6)
		: SYSCALL_CLOBBERLIST);
	return r7 && r2>0 ? -r2 : r2;
}
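/*
 * Illustrative use (not part of this header): musl's internal syscall
 * macros dispatch to these wrappers, so a write(2) would reduce to
 * something like
 *
 *     long ret = __syscall3(SYS_write, fd, (long)buf, (long)len);
 *
 * where SYS_write comes from the generated bits/syscall.h and fd, buf and
 * len are placeholder names; ret is either the byte count or a -errno.
 */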

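/*
 * vdso support: clock_gettime tries the vdso entry points named below
 * before falling back to a real syscall. CGT32 names the legacy 32-bit
 * time_t symbol, CGT the time64 clock_gettime64 symbol.
 */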
#define VDSO_USEFUL
#define VDSO_CGT32_SYM "__vdso_clock_gettime"
#define VDSO_CGT32_VER "LINUX_2.6"
#define VDSO_CGT_SYM "__vdso_clock_gettime64"
#define VDSO_CGT_VER "LINUX_2.6"

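/*
 * mips uses its own socket option numbering; these are the pre-time64
 * SO_SNDTIMEO/SO_RCVTIMEO values, which the socket option code needs in
 * order to translate between the old and time64 timeout options.
 */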
#define SO_SNDTIMEO_OLD 0x1005
#define SO_RCVTIMEO_OLD 0x1006

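/*
 * The mips kernel also exposes the legacy socketcall() multiplexer;
 * undefining SYS_socketcall here presumably steers musl's socket
 * functions to the direct per-call socket syscalls instead.
 */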
#undef SYS_socketcall