; $Id: VMMR0JmpA-amd64.asm $
;; @file
; VMM - R0 SetJmp / LongJmp routines for AMD64.
;

;
; Copyright (C) 2006-2019 Oracle Corporation
;
; This file is part of VirtualBox Open Source Edition (OSE), as
; available from http://www.virtualbox.org. This file is free software;
; you can redistribute it and/or modify it under the terms of the GNU
; General Public License (GPL) as published by the Free Software
; Foundation, in version 2 as it comes in the "COPYING" file of the
; VirtualBox OSE distribution. VirtualBox OSE is distributed in the
; hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
;

;*******************************************************************************
;* Header Files                                                                *
;*******************************************************************************
%define RT_ASM_WITH_SEH64
%include "VBox/asmdefs.mac"
%include "VMMInternal.mac"
%include "VBox/err.mac"
%include "VBox/param.mac"


;*******************************************************************************
;*  Defined Constants And Macros                                               *
;*******************************************************************************
%define RESUME_MAGIC    07eadf00dh
%define STACK_PADDING   0eeeeeeeeeeeeeeeeh

;; Workaround for the Linux 4.6 fast/slow syscall stack depth difference.
%ifdef VMM_R0_SWITCH_STACK
 %define STACK_FUZZ_SIZE 0
%else
 %define STACK_FUZZ_SIZE 128
%endif


BEGINCODE


;;
; The setjmp variant used for calling Ring-3.
;
; This differs from the normal setjmp in that it will resume VMMRZCallRing3 if we're
; in the middle of a ring-3 call. Another difference is the function pointer and
; arguments. These have to do with the resuming code and the stack frame of the caller.
;
; @returns  VINF_SUCCESS on success or whatever is passed to vmmR0CallRing3LongJmp.
; @param    pJmpBuf msc:rcx gcc:rdi x86:[esp+0x04]     Our jmp_buf.
; @param    pfn     msc:rdx gcc:rsi x86:[esp+0x08]     The function to be called when not resuming.
; @param    pvUser1 msc:r8  gcc:rdx x86:[esp+0x0c]     The first argument of that function.
; @param    pvUser2 msc:r9  gcc:rcx x86:[esp+0x10]     The second argument of that function.
;
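; A rough C-level usage sketch, using the parameter names from the list above
; (illustration only; the authoritative declarations live in the VMM internal
; headers, cf. the VMMInternal.mac include):
;
;   int rc = vmmR0CallRing3SetJmp(pJmpBuf, pfn, pvUser1, pvUser2);
;   /* rc is VINF_SUCCESS on success, otherwise whatever status was passed
;      to vmmR0CallRing3LongJmp. */
;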
BEGINPROC vmmR0CallRing3SetJmp
GLOBALNAME vmmR0CallRing3SetJmp2
GLOBALNAME vmmR0CallRing3SetJmpEx
    ;
    ; Save the registers.
    ;
    push    rbp
    SEH64_PUSH_xBP
    mov     rbp, rsp
    SEH64_SET_FRAME_xBP 0
 %ifdef ASM_CALL64_MSC
    sub     rsp, 30h + STACK_FUZZ_SIZE  ; (10h is used by resume (??), 20h for callee spill area)
    SEH64_ALLOCATE_STACK 30h + STACK_FUZZ_SIZE
SEH64_END_PROLOGUE
    mov     r11, rdx                    ; pfn
    mov     rdx, rcx                    ; pJmpBuf;
 %else
    sub     rsp, 10h + STACK_FUZZ_SIZE  ; (10h is used by resume (??))
    SEH64_ALLOCATE_STACK 10h + STACK_FUZZ_SIZE
SEH64_END_PROLOGUE
    mov     r8, rdx                     ; pvUser1 (save it like MSC)
    mov     r9, rcx                     ; pvUser2 (save it like MSC)
    mov     r11, rsi                    ; pfn
    mov     rdx, rdi                    ; pJmpBuf
 %endif
    mov     [xDX + VMMR0JMPBUF.rbx], rbx
 %ifdef ASM_CALL64_MSC
    mov     [xDX + VMMR0JMPBUF.rsi], rsi
    mov     [xDX + VMMR0JMPBUF.rdi], rdi
 %endif
    mov     [xDX + VMMR0JMPBUF.rbp], rbp
    mov     [xDX + VMMR0JMPBUF.r12], r12
    mov     [xDX + VMMR0JMPBUF.r13], r13
    mov     [xDX + VMMR0JMPBUF.r14], r14
    mov     [xDX + VMMR0JMPBUF.r15], r15
    mov     xAX, [rbp + 8]              ; (not really necessary, except for validity check)
    mov     [xDX + VMMR0JMPBUF.rip], xAX
 %ifdef ASM_CALL64_MSC
    lea     r10, [rsp + 20h]            ; must save the spill area
 %else
    lea     r10, [rsp]
 %endif
    mov     [xDX + VMMR0JMPBUF.rsp], r10
 %ifdef RT_OS_WINDOWS
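    ; Only the Windows x64 calling convention treats xmm6 thru xmm15 as
    ; callee-saved (they are all volatile in the System V AMD64 ABI), hence
    ; the RT_OS_WINDOWS guard around saving and restoring them.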
    movdqa  [xDX + VMMR0JMPBUF.xmm6], xmm6
    movdqa  [xDX + VMMR0JMPBUF.xmm7], xmm7
    movdqa  [xDX + VMMR0JMPBUF.xmm8], xmm8
    movdqa  [xDX + VMMR0JMPBUF.xmm9], xmm9
    movdqa  [xDX + VMMR0JMPBUF.xmm10], xmm10
    movdqa  [xDX + VMMR0JMPBUF.xmm11], xmm11
    movdqa  [xDX + VMMR0JMPBUF.xmm12], xmm12
    movdqa  [xDX + VMMR0JMPBUF.xmm13], xmm13
    movdqa  [xDX + VMMR0JMPBUF.xmm14], xmm14
    movdqa  [xDX + VMMR0JMPBUF.xmm15], xmm15
 %endif
    pushf
    pop     xAX
    mov     [xDX + VMMR0JMPBUF.rflags], xAX

    ;
    ; If we're not in a ring-3 call, call pfn and return.
    ;
    test    byte [xDX + VMMR0JMPBUF.fInRing3Call], 1
    jnz     .resume

 %ifdef VMM_R0_SWITCH_STACK
    mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
    test    r15, r15
    jz      .entry_error
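    ; Strict builds fill the alternate stack with a recognizable pattern so its
    ; actual usage can be inspected later; the first dword doubles as a marker
    ; that must be zero here and is cleared again once pfn has returned.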
  %ifdef VBOX_STRICT
    cmp     dword [r15], 0h
    jne     .entry_error
    mov     rdi, r15
    mov     rcx, VMM_STACK_SIZE / 8
    mov     rax, qword 0eeeeeeeffeeeeeeeh
    repne stosq
    mov     [rdi - 10h], rbx
  %endif
    lea     r15, [r15 + VMM_STACK_SIZE - 40h]
    mov     rsp, r15                    ; Switch stack!
 %endif ; VMM_R0_SWITCH_STACK

    mov     r12, rdx                    ; Save pJmpBuf.
 %ifdef ASM_CALL64_MSC
    mov     rcx, r8                     ; pvUser -> arg0
    mov     rdx, r9
 %else
    mov     rdi, r8                     ; pvUser -> arg0
    mov     rsi, r9
 %endif
    call    r11
    mov     rdx, r12                    ; Restore pJmpBuf

 %ifdef VMM_R0_SWITCH_STACK
  %ifdef VBOX_STRICT
    mov     r15, [xDX + VMMR0JMPBUF.pvSavedStack]
    mov     dword [r15], 0h             ; Reset the marker
  %endif
 %endif

    ;
    ; Return like in the long jump but clear rip, no shortcuts here.
    ;
.proper_return:
%ifdef RT_OS_WINDOWS
    movdqa  xmm6,  [xDX + VMMR0JMPBUF.xmm6 ]
    movdqa  xmm7,  [xDX + VMMR0JMPBUF.xmm7 ]
    movdqa  xmm8,  [xDX + VMMR0JMPBUF.xmm8 ]
    movdqa  xmm9,  [xDX + VMMR0JMPBUF.xmm9 ]
    movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
    movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
    movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
    movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
    movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
    movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
%endif
    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
%ifdef ASM_CALL64_MSC
    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
%endif
    mov     r12, [xDX + VMMR0JMPBUF.r12]
    mov     r13, [xDX + VMMR0JMPBUF.r13]
    mov     r14, [xDX + VMMR0JMPBUF.r14]
    mov     r15, [xDX + VMMR0JMPBUF.r15]
    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
    and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for the validity check.
    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
    push    qword [xDX + VMMR0JMPBUF.rflags]
    popf
    leave
    ret

.entry_error:
    mov     eax, VERR_VMM_SET_JMP_ERROR
    jmp     .proper_return

.stack_overflow:
    mov     eax, VERR_VMM_SET_JMP_STACK_OVERFLOW
    jmp     .proper_return

    ;
    ; Aborting resume.
    ; Note! No need to restore XMM registers here since we haven't touched them yet.
    ;
.bad:
    and     qword [xDX + VMMR0JMPBUF.rip], byte 0 ; used for the validity check.
    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
 %endif
    mov     r12, [xDX + VMMR0JMPBUF.r12]
    mov     r13, [xDX + VMMR0JMPBUF.r13]
    mov     r14, [xDX + VMMR0JMPBUF.r14]
    mov     r15, [xDX + VMMR0JMPBUF.r15]
    mov     eax, VERR_VMM_SET_JMP_ABORTED_RESUME
    leave
    ret

    ;
    ; Resume the VMMRZCallRing3 call.
    ;
.resume:
 %ifndef VMM_R0_SWITCH_STACK
    ; Sanity-check the incoming stack, applying fuzz if needed.
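    ; (I.e. the resume is only accepted when the rsp recorded just above is
    ;  within +/- STACK_FUZZ_SIZE bytes of the rsp recorded by the previous
    ;  setjmp; cf. the Linux 4.6 note at the STACK_FUZZ_SIZE definition.)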
    sub     r10, [xDX + VMMR0JMPBUF.SpCheck]
    jz      .resume_stack_checked_out
    add     r10, STACK_FUZZ_SIZE        ; plus/minus STACK_FUZZ_SIZE is fine.
    cmp     r10, STACK_FUZZ_SIZE * 2
    ja      .bad

    mov     r10, [xDX + VMMR0JMPBUF.SpCheck]
    mov     [xDX + VMMR0JMPBUF.rsp], r10 ; Must be updated in case of another long jump (used for the save size calculation).

.resume_stack_checked_out:
    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
    cmp     rcx, VMM_STACK_SIZE
    ja      .bad
    test    rcx, 7
    jnz     .bad
    mov     rdi, [xDX + VMMR0JMPBUF.SpCheck]
    sub     rdi, [xDX + VMMR0JMPBUF.SpResume]
    cmp     rcx, rdi
    jne     .bad
 %endif

%ifdef VMM_R0_SWITCH_STACK
    ; Switch stack.
    mov     rsp, [xDX + VMMR0JMPBUF.SpResume]
%else
    ; Restore the stack.
    mov     ecx, [xDX + VMMR0JMPBUF.cbSavedStack]
    shr     ecx, 3
    mov     rsi, [xDX + VMMR0JMPBUF.pvSavedStack]
    mov     rdi, [xDX + VMMR0JMPBUF.SpResume]
    mov     rsp, rdi
    rep movsq
%endif ; !VMM_R0_SWITCH_STACK
    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 0

    ;
    ; Continue where we left off.
    ;
%ifdef VBOX_STRICT
    pop     rax                         ; magic
    cmp     rax, RESUME_MAGIC
    je      .magic_ok
    mov     ecx, 0123h                  ; deliberately provoke a crash, the resume magic on the stack is wrong
    mov     [ecx], edx
.magic_ok:
%endif
%ifdef RT_OS_WINDOWS
    movdqa  xmm6,  [rsp + 000h]
    movdqa  xmm7,  [rsp + 010h]
    movdqa  xmm8,  [rsp + 020h]
    movdqa  xmm9,  [rsp + 030h]
    movdqa  xmm10, [rsp + 040h]
    movdqa  xmm11, [rsp + 050h]
    movdqa  xmm12, [rsp + 060h]
    movdqa  xmm13, [rsp + 070h]
    movdqa  xmm14, [rsp + 080h]
    movdqa  xmm15, [rsp + 090h]
    add     rsp, 0a0h
%endif
    popf
    pop     rbx
%ifdef ASM_CALL64_MSC
    pop     rsi
    pop     rdi
%endif
    pop     r12
    pop     r13
    pop     r14
    pop     r15
    pop     rbp
    xor     eax, eax                    ; VINF_SUCCESS
    ret
ENDPROC vmmR0CallRing3SetJmp


;;
; Worker for VMMRZCallRing3.
; This will save the stack and registers.
;
; @param    pJmpBuf msc:rcx gcc:rdi x86:[ebp+8]     Pointer to the jump buffer.
; @param    rc      msc:rdx gcc:rsi x86:[ebp+c]     The return code.
;
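; A rough C-level usage sketch, using the parameter names from the list above
; (illustration only; the authoritative declaration lives in the VMM internal
; headers):
;
;   vmmR0CallRing3LongJmp(pJmpBuf, rc);
;   /* On success control returns to the matching vmmR0CallRing3SetJmp caller
;      with rc as its return value; when the ring-3 call is later resumed, the
;      long jump itself appears to return VINF_SUCCESS. If the jump buffer is
;      not armed, VERR_VMM_LONG_JMP_ERROR is returned directly. */
;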
BEGINPROC vmmR0CallRing3LongJmp
    ;
    ; Save the registers on the stack.
    ;
    push    rbp
    SEH64_PUSH_xBP
    mov     rbp, rsp
    SEH64_SET_FRAME_xBP 0
    push    r15
    SEH64_PUSH_GREG r15
    push    r14
    SEH64_PUSH_GREG r14
    push    r13
    SEH64_PUSH_GREG r13
    push    r12
    SEH64_PUSH_GREG r12
%ifdef ASM_CALL64_MSC
    push    rdi
    SEH64_PUSH_GREG rdi
    push    rsi
    SEH64_PUSH_GREG rsi
%endif
    push    rbx
    SEH64_PUSH_GREG rbx
    pushf
    SEH64_ALLOCATE_STACK 8
%ifdef RT_OS_WINDOWS
    sub     rsp, 0a0h
    SEH64_ALLOCATE_STACK 0a0h
    movdqa  [rsp + 000h], xmm6
    movdqa  [rsp + 010h], xmm7
    movdqa  [rsp + 020h], xmm8
    movdqa  [rsp + 030h], xmm9
    movdqa  [rsp + 040h], xmm10
    movdqa  [rsp + 050h], xmm11
    movdqa  [rsp + 060h], xmm12
    movdqa  [rsp + 070h], xmm13
    movdqa  [rsp + 080h], xmm14
    movdqa  [rsp + 090h], xmm15
%endif
%ifdef VBOX_STRICT
    push    RESUME_MAGIC
    SEH64_ALLOCATE_STACK 8
%endif
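    ; In strict builds the RESUME_MAGIC pushed above is popped and verified
    ; again, both by the resume path in vmmR0CallRing3SetJmp and by the .nok
    ; failure path below.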
SEH64_END_PROLOGUE

    ;
    ; Normalize the parameters.
    ;
%ifdef ASM_CALL64_MSC
    mov     eax, edx                    ; rc
    mov     rdx, rcx                    ; pJmpBuf
%else
    mov     rdx, rdi                    ; pJmpBuf
    mov     eax, esi                    ; rc
%endif

    ;
    ; Is the jump buffer armed?
    ;
    cmp     qword [xDX + VMMR0JMPBUF.rip], byte 0
    je      .nok

    ;
    ; Sanity checks.
    ;
    mov     rdi, [xDX + VMMR0JMPBUF.pvSavedStack]
    test    rdi, rdi                    ; darwin may set this to 0.
    jz      .nok
    mov     [xDX + VMMR0JMPBUF.SpResume], rsp
 %ifndef VMM_R0_SWITCH_STACK
    mov     rsi, rsp
    mov     rcx, [xDX + VMMR0JMPBUF.rsp]
    sub     rcx, rsi

    ; Sanity check the size (the 8-byte alignment is checked below, before the copy).
    cmp     rcx, VMM_STACK_SIZE         ; check max size.
    jnbe    .nok

    ;
    ; Copy the stack
    ;
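    ; At this point rsi = the current rsp (source), rdi = pvSavedStack
    ; (destination) and rcx = the number of stack bytes used since the
    ; matching setjmp (VMMR0JMPBUF.rsp - rsp).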
    test    ecx, 7                      ; check alignment
    jnz     .nok
    mov     [xDX + VMMR0JMPBUF.cbSavedStack], ecx
    shr     ecx, 3
    rep movsq

 %endif ; !VMM_R0_SWITCH_STACK

    ; Save the current PC and the return PC here to assist unwinding.
.unwind_point:
    lea     rcx, [.unwind_point wrt RIP]
    mov     [xDX + VMMR0JMPBUF.SavedEipForUnwind], rcx
    mov     rcx, [xDX + VMMR0JMPBUF.rbp]
    lea     rcx, [rcx + 8]
    mov     [xDX + VMMR0JMPBUF.UnwindRetPcLocation], rcx
    mov     rcx, [rcx]
    mov     [xDX + VMMR0JMPBUF.UnwindRetPcValue], rcx

    ; Save RSP & RBP to enable stack dumps
    mov     rcx, rbp
    mov     [xDX + VMMR0JMPBUF.SavedEbp], rcx
    sub     rcx, 8
    mov     [xDX + VMMR0JMPBUF.SavedEsp], rcx

    ; Store the last pieces of info.
    mov     rcx, [xDX + VMMR0JMPBUF.rsp]
    mov     [xDX + VMMR0JMPBUF.SpCheck], rcx
    mov     byte [xDX + VMMR0JMPBUF.fInRing3Call], 1

    ;
    ; Do the long jump.
    ;
%ifdef RT_OS_WINDOWS
    movdqa  xmm6,  [xDX + VMMR0JMPBUF.xmm6 ]
    movdqa  xmm7,  [xDX + VMMR0JMPBUF.xmm7 ]
    movdqa  xmm8,  [xDX + VMMR0JMPBUF.xmm8 ]
    movdqa  xmm9,  [xDX + VMMR0JMPBUF.xmm9 ]
    movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
    movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
    movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
    movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
    movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
    movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
%endif
    mov     rbx, [xDX + VMMR0JMPBUF.rbx]
%ifdef ASM_CALL64_MSC
    mov     rsi, [xDX + VMMR0JMPBUF.rsi]
    mov     rdi, [xDX + VMMR0JMPBUF.rdi]
%endif
    mov     r12, [xDX + VMMR0JMPBUF.r12]
    mov     r13, [xDX + VMMR0JMPBUF.r13]
    mov     r14, [xDX + VMMR0JMPBUF.r14]
    mov     r15, [xDX + VMMR0JMPBUF.r15]
    mov     rbp, [xDX + VMMR0JMPBUF.rbp]
    mov     rsp, [xDX + VMMR0JMPBUF.rsp]
    push    qword [xDX + VMMR0JMPBUF.rflags]
    popf
    leave
    ret

    ;
    ; Failure
    ;
.nok:
%ifdef VBOX_STRICT
    pop     rax                         ; magic
    cmp     rax, RESUME_MAGIC
    je      .magic_ok
    mov     ecx, 0123h                  ; deliberately provoke a crash, the resume magic on the stack is wrong
    mov     [rcx], edx
.magic_ok:
%endif
    mov     eax, VERR_VMM_LONG_JMP_ERROR
%ifdef RT_OS_WINDOWS
    add     rsp, 0a0h                   ; skip XMM registers since they are unmodified.
%endif
    popf
    pop     rbx
%ifdef ASM_CALL64_MSC
    pop     rsi
    pop     rdi
%endif
    pop     r12
    pop     r13
    pop     r14
    pop     r15
    leave
    ret
ENDPROC vmmR0CallRing3LongJmp


;;
; Internal R0 logger worker: Logger wrapper.
;
; @cproto VMMR0DECL(void) vmmR0LoggerWrapper(const char *pszFormat, ...)
;
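; (On AMD64 this wrapper is not expected to be reached; the int3 instructions
;  below will break into the debugger if it ever is.)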
BEGINPROC_EXPORTED vmmR0LoggerWrapper
SEH64_END_PROLOGUE
    int3
    int3
    int3
    ret
ENDPROC vmmR0LoggerWrapper