summaryrefslogtreecommitdiffstats
path: root/arch/x86/kernel/sev_verify_cbit.S
blob: 1ab65f6c6ae7a159221b438a7acf5d0df4740618 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *	sev_verify_cbit.S - Code for verification of the C-bit position reported
 *			    by the Hypervisor when running with SEV enabled.
 *
 *	Copyright (c) 2020  Joerg Roedel (jroedel@suse.de)
 *
 * sev_verify_cbit() is called before switching to a new long-mode page-table
 * at boot.
 *
 * It verifies that the C-bit position is correct by writing a random value
 * to an encrypted memory location while on the current page-table. Then it
 * switches to the new page-table to verify that the memory content is still
 * the same. After that it switches back to the current page-table and, if
 * the check succeeded, it returns. If the check failed, the code invalidates
 * the stack pointer and goes into a hlt loop. The stack pointer is
 * invalidated to make sure no interrupt or exception can get the CPU out of
 * the hlt loop.
 *
 * New page-table pointer is expected in %rdi (first parameter)
 *
 */
/*
 * unsigned long sev_verify_cbit(unsigned long cr3)
 *
 * In:    %rdi = new page-table pointer (prospective CR3 value)
 * Out:   %rax = the same page-table pointer, ready to be loaded into CR3
 * Uses:  %rsi (saved CR4), %rdx (random check value), %rcx (saved CR3)
 *
 * On failure this function does not return - it wedges the CPU in a
 * hlt loop with an invalidated stack pointer.
 */
SYM_FUNC_START(sev_verify_cbit)
#ifdef CONFIG_AMD_MEM_ENCRYPT
	/* First check if a C-bit was detected */
	movq	sme_me_mask(%rip), %rsi
	testq	%rsi, %rsi
	jz	3f			/* No memory encryption - nothing to verify */

	/* sme_me_mask != 0 could mean SME or SEV - Check also for SEV */
	movq	sev_status(%rip), %rsi
	testq	%rsi, %rsi
	jz	3f			/* SME only - hypervisor not involved, skip check */

	/* Save CR4 in %rsi */
	movq	%rsi, %rsi
	movq	%cr4, %rsi

	/*
	 * Disable Global Pages - global TLB entries are not flushed by the
	 * CR3 writes below, so a stale global mapping could satisfy the
	 * re-read and defeat the verification.
	 */
	movq	%rsi, %rdx
	andq	$(~X86_CR4_PGE), %rdx
	movq	%rdx, %cr4

	/*
	 * Verified that running under SEV - now get a random value using
	 * RDRAND. This instruction is mandatory when running as an SEV guest.
	 *
	 * Don't bail out of the loop if RDRAND returns errors. It is better to
	 * prevent forward progress than to work with a non-random value here.
	 */
1:	rdrand	%rdx
	jnc	1b			/* CF=0: no entropy available yet, retry */

	/* Store value to memory and keep it in %rdx */
	movq	%rdx, sev_check_data(%rip)

	/* Backup current %cr3 value to restore it later */
	movq	%cr3, %rcx

	/* Switch to new %cr3 - This might unmap the stack */
	movq	%rdi, %cr3

	/*
	 * Compare value in %rdx with memory location. If C-bit is incorrect
	 * this would read the encrypted data and make the check fail.
	 */
	cmpq	%rdx, sev_check_data(%rip)

	/* Restore old %cr3 */
	movq	%rcx, %cr3

	/* Restore previous CR4 */
	movq	%rsi, %cr4

	/* Check CMPQ result - flags survive the CR3/CR4 writes above */
	je	3f

	/*
	 * The check failed, prevent any forward progress to prevent ROP
	 * attacks, invalidate the stack and go into a hlt loop.
	 * The 32-bit write to %esp zero-extends into the full %rsp; the
	 * subtract then leaves %rsp pointing just below the top of the
	 * address space where no stack can be mapped, so any interrupt or
	 * exception delivery faults instead of resuming execution.
	 */
	xorl	%esp, %esp
	subq	$0x1000, %rsp
2:	hlt
	jmp 2b
3:
#endif
	/* Return page-table pointer */
	movq	%rdi, %rax
	RET
SYM_FUNC_END(sev_verify_cbit)