/*
* Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>
#if CTX_INCLUDE_EL2_REGS
.global el2_sysregs_context_save_common
.global el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
.global el2_sysregs_context_save_spe
.global el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
.global el2_sysregs_context_save_mte
.global el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
.global el2_sysregs_context_save_mpam
.global el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_FGT
.global el2_sysregs_context_save_fgt
.global el2_sysregs_context_restore_fgt
#endif /* ENABLE_FEAT_FGT */
#if ENABLE_FEAT_ECV
.global el2_sysregs_context_save_ecv
.global el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
.global el2_sysregs_context_save_vhe
.global el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
.global el2_sysregs_context_save_ras
.global el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
.global el2_sysregs_context_save_nv2
.global el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
.global el2_sysregs_context_save_trf
.global el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
.global el2_sysregs_context_save_csv2
.global el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#if ENABLE_FEAT_HCX
.global el2_sysregs_context_save_hcx
.global el2_sysregs_context_restore_hcx
#endif /* ENABLE_FEAT_HCX */
#endif /* CTX_INCLUDE_EL2_REGS */
.global el1_sysregs_context_save
.global el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
.global fpregs_context_save
.global fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
.global prepare_el3_entry
.global restore_gp_pmcr_pauth_regs
.global save_and_update_ptw_el1_sys_regs
.global el3_exit
#if CTX_INCLUDE_EL2_REGS
/* -----------------------------------------------------
* The following functions strictly follow the AArch64
* PCS to use x9-x16 (temporary caller-saved registers)
* to save/restore EL2 system register context.
* el2_sysregs_context_save/restore_common functions
* save and restore registers that are common to all
* configurations. The rest of the functions save and
* restore EL2 system registers that are present when a
* particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
* where the register context will be saved/restored.
*
* The following registers are not added.
* AMEVCNTVOFF0<n>_EL2
* AMEVCNTVOFF1<n>_EL2
* ICH_AP0R<n>_EL2
* ICH_AP1R<n>_EL2
* ICH_LR<n>_EL2
* -----------------------------------------------------
*/
func el2_sysregs_context_save_common
mrs x9, actlr_el2
mrs x10, afsr0_el2
stp x9, x10, [x0, #CTX_ACTLR_EL2]
mrs x11, afsr1_el2
mrs x12, amair_el2
stp x11, x12, [x0, #CTX_AFSR1_EL2]
mrs x13, cnthctl_el2
mrs x14, cntvoff_el2
stp x13, x14, [x0, #CTX_CNTHCTL_EL2]
mrs x15, cptr_el2
str x15, [x0, #CTX_CPTR_EL2]
#if CTX_INCLUDE_AARCH32_REGS
mrs x16, dbgvcr32_el2
str x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
mrs x9, elr_el2
mrs x10, esr_el2
stp x9, x10, [x0, #CTX_ELR_EL2]
mrs x11, far_el2
mrs x12, hacr_el2
stp x11, x12, [x0, #CTX_FAR_EL2]
mrs x13, hcr_el2
mrs x14, hpfar_el2
stp x13, x14, [x0, #CTX_HCR_EL2]
mrs x15, hstr_el2
mrs x16, ICC_SRE_EL2
stp x15, x16, [x0, #CTX_HSTR_EL2]
mrs x9, ICH_HCR_EL2
mrs x10, ICH_VMCR_EL2
stp x9, x10, [x0, #CTX_ICH_HCR_EL2]
mrs x11, mair_el2
mrs x12, mdcr_el2
stp x11, x12, [x0, #CTX_MAIR_EL2]
mrs x14, sctlr_el2
str x14, [x0, #CTX_SCTLR_EL2]
mrs x15, spsr_el2
mrs x16, sp_el2
stp x15, x16, [x0, #CTX_SPSR_EL2]
mrs x9, tcr_el2
mrs x10, tpidr_el2
stp x9, x10, [x0, #CTX_TCR_EL2]
mrs x11, ttbr0_el2
mrs x12, vbar_el2
stp x11, x12, [x0, #CTX_TTBR0_EL2]
mrs x13, vmpidr_el2
mrs x14, vpidr_el2
stp x13, x14, [x0, #CTX_VMPIDR_EL2]
mrs x15, vtcr_el2
mrs x16, vttbr_el2
stp x15, x16, [x0, #CTX_VTCR_EL2]
ret
endfunc el2_sysregs_context_save_common
func el2_sysregs_context_restore_common
ldp x9, x10, [x0, #CTX_ACTLR_EL2]
msr actlr_el2, x9
msr afsr0_el2, x10
ldp x11, x12, [x0, #CTX_AFSR1_EL2]
msr afsr1_el2, x11
msr amair_el2, x12
ldp x13, x14, [x0, #CTX_CNTHCTL_EL2]
msr cnthctl_el2, x13
msr cntvoff_el2, x14
ldr x15, [x0, #CTX_CPTR_EL2]
msr cptr_el2, x15
#if CTX_INCLUDE_AARCH32_REGS
ldr x16, [x0, #CTX_DBGVCR32_EL2]
msr dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */
ldp x9, x10, [x0, #CTX_ELR_EL2]
msr elr_el2, x9
msr esr_el2, x10
ldp x11, x12, [x0, #CTX_FAR_EL2]
msr far_el2, x11
msr hacr_el2, x12
ldp x13, x14, [x0, #CTX_HCR_EL2]
msr hcr_el2, x13
msr hpfar_el2, x14
ldp x15, x16, [x0, #CTX_HSTR_EL2]
msr hstr_el2, x15
msr ICC_SRE_EL2, x16
ldp x9, x10, [x0, #CTX_ICH_HCR_EL2]
msr ICH_HCR_EL2, x9
msr ICH_VMCR_EL2, x10
ldp x11, x12, [x0, #CTX_MAIR_EL2]
msr mair_el2, x11
msr mdcr_el2, x12
ldr x14, [x0, #CTX_SCTLR_EL2]
msr sctlr_el2, x14
ldp x15, x16, [x0, #CTX_SPSR_EL2]
msr spsr_el2, x15
msr sp_el2, x16
ldp x9, x10, [x0, #CTX_TCR_EL2]
msr tcr_el2, x9
msr tpidr_el2, x10
ldp x11, x12, [x0, #CTX_TTBR0_EL2]
msr ttbr0_el2, x11
msr vbar_el2, x12
ldp x13, x14, [x0, #CTX_VMPIDR_EL2]
msr vmpidr_el2, x13
msr vpidr_el2, x14
ldp x15, x16, [x0, #CTX_VTCR_EL2]
msr vtcr_el2, x15
msr vttbr_el2, x16
ret
endfunc el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
func el2_sysregs_context_save_spe
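/*
 * PMSCR_EL2 register is saved only when FEAT_SPE is supported.
 */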
mrs x13, PMSCR_EL2
str x13, [x0, #CTX_PMSCR_EL2]
ret
endfunc el2_sysregs_context_save_spe
func el2_sysregs_context_restore_spe
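/*
 * PMSCR_EL2 register is restored only when FEAT_SPE is supported.
 */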
ldr x13, [x0, #CTX_PMSCR_EL2]
msr PMSCR_EL2, x13
ret
endfunc el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
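/*
 * TFSR_EL2 register is saved only when FEAT_MTE is supported.
 */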
mrs x9, TFSR_EL2
str x9, [x0, #CTX_TFSR_EL2]
ret
endfunc el2_sysregs_context_save_mte
func el2_sysregs_context_restore_mte
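/*
 * TFSR_EL2 register is restored only when FEAT_MTE is supported.
 */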
ldr x9, [x0, #CTX_TFSR_EL2]
msr TFSR_EL2, x9
ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
func el2_sysregs_context_save_mpam
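/*
 * MPAM EL2 registers are saved only when FEAT_MPAM is supported.
 */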
mrs x10, MPAM2_EL2
str x10, [x0, #CTX_MPAM2_EL2]
mrs x11, MPAMHCR_EL2
mrs x12, MPAMVPM0_EL2
stp x11, x12, [x0, #CTX_MPAMHCR_EL2]
mrs x13, MPAMVPM1_EL2
mrs x14, MPAMVPM2_EL2
stp x13, x14, [x0, #CTX_MPAMVPM1_EL2]
mrs x15, MPAMVPM3_EL2
mrs x16, MPAMVPM4_EL2
stp x15, x16, [x0, #CTX_MPAMVPM3_EL2]
mrs x9, MPAMVPM5_EL2
mrs x10, MPAMVPM6_EL2
stp x9, x10, [x0, #CTX_MPAMVPM5_EL2]
mrs x11, MPAMVPM7_EL2
mrs x12, MPAMVPMV_EL2
stp x11, x12, [x0, #CTX_MPAMVPM7_EL2]
ret
endfunc el2_sysregs_context_save_mpam
func el2_sysregs_context_restore_mpam
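/*
 * MPAM EL2 registers are restored only when FEAT_MPAM is supported.
 */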
ldr x10, [x0, #CTX_MPAM2_EL2]
msr MPAM2_EL2, x10
ldp x11, x12, [x0, #CTX_MPAMHCR_EL2]
msr MPAMHCR_EL2, x11
msr MPAMVPM0_EL2, x12
ldp x13, x14, [x0, #CTX_MPAMVPM1_EL2]
msr MPAMVPM1_EL2, x13
msr MPAMVPM2_EL2, x14
ldp x15, x16, [x0, #CTX_MPAMVPM3_EL2]
msr MPAMVPM3_EL2, x15
msr MPAMVPM4_EL2, x16
ldp x9, x10, [x0, #CTX_MPAMVPM5_EL2]
msr MPAMVPM5_EL2, x9
msr MPAMVPM6_EL2, x10
ldp x11, x12, [x0, #CTX_MPAMVPM7_EL2]
msr MPAMVPM7_EL2, x11
msr MPAMVPMV_EL2, x12
ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_FGT
func el2_sysregs_context_save_fgt
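/*
 * Fine-grained trap registers are saved only when FEAT_FGT is
 * supported. HAFGRTR_EL2 is present only when FEAT_AMUv1 is also
 * implemented.
 */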
mrs x13, HDFGRTR_EL2
#if ENABLE_FEAT_AMUv1
mrs x14, HAFGRTR_EL2
stp x13, x14, [x0, #CTX_HDFGRTR_EL2]
#else
str x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
mrs x15, HDFGWTR_EL2
mrs x16, HFGITR_EL2
stp x15, x16, [x0, #CTX_HDFGWTR_EL2]
mrs x9, HFGRTR_EL2
mrs x10, HFGWTR_EL2
stp x9, x10, [x0, #CTX_HFGRTR_EL2]
ret
endfunc el2_sysregs_context_save_fgt
func el2_sysregs_context_restore_fgt
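/*
 * Fine-grained trap registers are restored only when FEAT_FGT is
 * supported. HAFGRTR_EL2 is present only when FEAT_AMUv1 is also
 * implemented.
 */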
#if ENABLE_FEAT_AMUv1
ldp x13, x14, [x0, #CTX_HDFGRTR_EL2]
msr HAFGRTR_EL2, x14
#else
ldr x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
msr HDFGRTR_EL2, x13
ldp x15, x16, [x0, #CTX_HDFGWTR_EL2]
msr HDFGWTR_EL2, x15
msr HFGITR_EL2, x16
ldp x9, x10, [x0, #CTX_HFGRTR_EL2]
msr HFGRTR_EL2, x9
msr HFGWTR_EL2, x10
ret
endfunc el2_sysregs_context_restore_fgt
#endif /* ENABLE_FEAT_FGT */
#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
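/*
 * CNTPOFF_EL2 register is saved only when FEAT_ECV is supported.
 */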
mrs x11, CNTPOFF_EL2
str x11, [x0, #CTX_CNTPOFF_EL2]
ret
endfunc el2_sysregs_context_save_ecv
func el2_sysregs_context_restore_ecv
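/*
 * CNTPOFF_EL2 register is restored only when FEAT_ECV is supported.
 */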
ldr x11, [x0, #CTX_CNTPOFF_EL2]
msr CNTPOFF_EL2, x11
ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
/*
* CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
* FEAT_Debugv8p2 (currently not in TF-A) is supported.
*/
mrs x9, contextidr_el2
mrs x10, ttbr1_el2
stp x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
ret
endfunc el2_sysregs_context_save_vhe
func el2_sysregs_context_restore_vhe
/*
* CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
* FEAT_Debugv8p2 (currently not in TF-A) is supported.
*/
ldp x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
msr contextidr_el2, x9
msr ttbr1_el2, x10
ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
func el2_sysregs_context_save_ras
/*
* VDISR_EL2 and VSESR_EL2 registers are saved only when
* FEAT_RAS is supported.
*/
mrs x11, vdisr_el2
mrs x12, vsesr_el2
stp x11, x12, [x0, #CTX_VDISR_EL2]
ret
endfunc el2_sysregs_context_save_ras
func el2_sysregs_context_restore_ras
/*
* VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
* is supported.
*/
ldp x11, x12, [x0, #CTX_VDISR_EL2]
msr vdisr_el2, x11
msr vsesr_el2, x12
ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
/*
* VNCR_EL2 register is saved only when FEAT_NV2 is supported.
*/
mrs x16, vncr_el2
str x16, [x0, #CTX_VNCR_EL2]
ret
endfunc el2_sysregs_context_save_nv2
func el2_sysregs_context_restore_nv2
/*
* VNCR_EL2 register is restored only when FEAT_NV2 is supported.
*/
ldr x16, [x0, #CTX_VNCR_EL2]
msr vncr_el2, x16
ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
func el2_sysregs_context_save_trf
/*
* TRFCR_EL2 register is saved only when FEAT_TRF is supported.
*/
mrs x12, TRFCR_EL2
str x12, [x0, #CTX_TRFCR_EL2]
ret
endfunc el2_sysregs_context_save_trf
func el2_sysregs_context_restore_trf
/*
* TRFCR_EL2 register is restored only when FEAT_TRF is supported.
*/
ldr x12, [x0, #CTX_TRFCR_EL2]
msr TRFCR_EL2, x12
ret
endfunc el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
/*
* SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
*/
mrs x13, scxtnum_el2
str x13, [x0, #CTX_SCXTNUM_EL2]
ret
endfunc el2_sysregs_context_save_csv2
func el2_sysregs_context_restore_csv2
/*
* SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
*/
ldr x13, [x0, #CTX_SCXTNUM_EL2]
msr scxtnum_el2, x13
ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#if ENABLE_FEAT_HCX
func el2_sysregs_context_save_hcx
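/*
 * HCRX_EL2 register is saved only when FEAT_HCX is supported.
 */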
mrs x14, hcrx_el2
str x14, [x0, #CTX_HCRX_EL2]
ret
endfunc el2_sysregs_context_save_hcx
func el2_sysregs_context_restore_hcx
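/*
 * HCRX_EL2 register is restored only when FEAT_HCX is supported.
 */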
ldr x14, [x0, #CTX_HCRX_EL2]
msr hcrx_el2, x14
ret
endfunc el2_sysregs_context_restore_hcx
#endif /* ENABLE_FEAT_HCX */
#endif /* CTX_INCLUDE_EL2_REGS */
/* ------------------------------------------------------------------
* The following function strictly follows the AArch64 PCS to use
* x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
* 'el1_sys_regs' structure where the register context will be saved.
* ------------------------------------------------------------------
*/
func el1_sysregs_context_save
mrs x9, spsr_el1
mrs x10, elr_el1
stp x9, x10, [x0, #CTX_SPSR_EL1]
#if !ERRATA_SPECULATIVE_AT
mrs x15, sctlr_el1
mrs x16, tcr_el1
stp x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */
mrs x17, cpacr_el1
mrs x9, csselr_el1
stp x17, x9, [x0, #CTX_CPACR_EL1]
mrs x10, sp_el1
mrs x11, esr_el1
stp x10, x11, [x0, #CTX_SP_EL1]
mrs x12, ttbr0_el1
mrs x13, ttbr1_el1
stp x12, x13, [x0, #CTX_TTBR0_EL1]
mrs x14, mair_el1
mrs x15, amair_el1
stp x14, x15, [x0, #CTX_MAIR_EL1]
mrs x16, actlr_el1
mrs x17, tpidr_el1
stp x16, x17, [x0, #CTX_ACTLR_EL1]
mrs x9, tpidr_el0
mrs x10, tpidrro_el0
stp x9, x10, [x0, #CTX_TPIDR_EL0]
mrs x13, par_el1
mrs x14, far_el1
stp x13, x14, [x0, #CTX_PAR_EL1]
mrs x15, afsr0_el1
mrs x16, afsr1_el1
stp x15, x16, [x0, #CTX_AFSR0_EL1]
mrs x17, contextidr_el1
mrs x9, vbar_el1
stp x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
mrs x11, spsr_abt
mrs x12, spsr_und
stp x11, x12, [x0, #CTX_SPSR_ABT]
mrs x13, spsr_irq
mrs x14, spsr_fiq
stp x13, x14, [x0, #CTX_SPSR_IRQ]
mrs x15, dacr32_el2
mrs x16, ifsr32_el2
stp x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
mrs x10, cntp_ctl_el0
mrs x11, cntp_cval_el0
stp x10, x11, [x0, #CTX_CNTP_CTL_EL0]
mrs x12, cntv_ctl_el0
mrs x13, cntv_cval_el0
stp x12, x13, [x0, #CTX_CNTV_CTL_EL0]
mrs x14, cntkctl_el1
str x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */
/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
mrs x15, TFSRE0_EL1
mrs x16, TFSR_EL1
stp x15, x16, [x0, #CTX_TFSRE0_EL1]
mrs x9, RGSR_EL1
mrs x10, GCR_EL1
stp x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */
ret
endfunc el1_sysregs_context_save
/* ------------------------------------------------------------------
* The following function strictly follows the AArch64 PCS to use
* x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
* ------------------------------------------------------------------
*/
func el1_sysregs_context_restore
ldp x9, x10, [x0, #CTX_SPSR_EL1]
msr spsr_el1, x9
msr elr_el1, x10
#if !ERRATA_SPECULATIVE_AT
ldp x15, x16, [x0, #CTX_SCTLR_EL1]
msr sctlr_el1, x15
msr tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */
ldp x17, x9, [x0, #CTX_CPACR_EL1]
msr cpacr_el1, x17
msr csselr_el1, x9
ldp x10, x11, [x0, #CTX_SP_EL1]
msr sp_el1, x10
msr esr_el1, x11
ldp x12, x13, [x0, #CTX_TTBR0_EL1]
msr ttbr0_el1, x12
msr ttbr1_el1, x13
ldp x14, x15, [x0, #CTX_MAIR_EL1]
msr mair_el1, x14
msr amair_el1, x15
ldp x16, x17, [x0, #CTX_ACTLR_EL1]
msr actlr_el1, x16
msr tpidr_el1, x17
ldp x9, x10, [x0, #CTX_TPIDR_EL0]
msr tpidr_el0, x9
msr tpidrro_el0, x10
ldp x13, x14, [x0, #CTX_PAR_EL1]
msr par_el1, x13
msr far_el1, x14
ldp x15, x16, [x0, #CTX_AFSR0_EL1]
msr afsr0_el1, x15
msr afsr1_el1, x16
ldp x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
msr contextidr_el1, x17
msr vbar_el1, x9
/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
ldp x11, x12, [x0, #CTX_SPSR_ABT]
msr spsr_abt, x11
msr spsr_und, x12
ldp x13, x14, [x0, #CTX_SPSR_IRQ]
msr spsr_irq, x13
msr spsr_fiq, x14
ldp x15, x16, [x0, #CTX_DACR32_EL2]
msr dacr32_el2, x15
msr ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */
/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
ldp x10, x11, [x0, #CTX_CNTP_CTL_EL0]
msr cntp_ctl_el0, x10
msr cntp_cval_el0, x11
ldp x12, x13, [x0, #CTX_CNTV_CTL_EL0]
msr cntv_ctl_el0, x12
msr cntv_cval_el0, x13
ldr x14, [x0, #CTX_CNTKCTL_EL1]
msr cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */
/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
ldp x11, x12, [x0, #CTX_TFSRE0_EL1]
msr TFSRE0_EL1, x11
msr TFSR_EL1, x12
ldp x13, x14, [x0, #CTX_RGSR_EL1]
msr RGSR_EL1, x13
msr GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */
/* No explicit ISB required here as ERET covers it */
ret
endfunc el1_sysregs_context_restore
/* ------------------------------------------------------------------
* The following function follows the aapcs_64 strictly to use
* x9-x17 (temporary caller-saved registers according to AArch64 PCS)
* to save floating point register context. It assumes that 'x0' is
* pointing to a 'fp_regs' structure where the register context will
* be saved.
*
* Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so CPTR_EL3.TFP is assumed to be cleared.
*
* TODO: Revisit when VFP is used in secure world
* ------------------------------------------------------------------
*/
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
stp q0, q1, [x0, #CTX_FP_Q0]
stp q2, q3, [x0, #CTX_FP_Q2]
stp q4, q5, [x0, #CTX_FP_Q4]
stp q6, q7, [x0, #CTX_FP_Q6]
stp q8, q9, [x0, #CTX_FP_Q8]
stp q10, q11, [x0, #CTX_FP_Q10]
stp q12, q13, [x0, #CTX_FP_Q12]
stp q14, q15, [x0, #CTX_FP_Q14]
stp q16, q17, [x0, #CTX_FP_Q16]
stp q18, q19, [x0, #CTX_FP_Q18]
stp q20, q21, [x0, #CTX_FP_Q20]
stp q22, q23, [x0, #CTX_FP_Q22]
stp q24, q25, [x0, #CTX_FP_Q24]
stp q26, q27, [x0, #CTX_FP_Q26]
stp q28, q29, [x0, #CTX_FP_Q28]
stp q30, q31, [x0, #CTX_FP_Q30]
mrs x9, fpsr
str x9, [x0, #CTX_FP_FPSR]
mrs x10, fpcr
str x10, [x0, #CTX_FP_FPCR]
#if CTX_INCLUDE_AARCH32_REGS
mrs x11, fpexc32_el2
str x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
ret
endfunc fpregs_context_save
/* ------------------------------------------------------------------
* The following function follows the aapcs_64 strictly to use x9-x17
* (temporary caller-saved registers according to AArch64 PCS) to
* restore floating point register context. It assumes that 'x0' is
* pointing to a 'fp_regs' structure from where the register context
* will be restored.
*
* Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so CPTR_EL3.TFP is assumed to be cleared.
*
* TODO: Revisit when VFP is used in secure world
* ------------------------------------------------------------------
*/
func fpregs_context_restore
ldp q0, q1, [x0, #CTX_FP_Q0]
ldp q2, q3, [x0, #CTX_FP_Q2]
ldp q4, q5, [x0, #CTX_FP_Q4]
ldp q6, q7, [x0, #CTX_FP_Q6]
ldp q8, q9, [x0, #CTX_FP_Q8]
ldp q10, q11, [x0, #CTX_FP_Q10]
ldp q12, q13, [x0, #CTX_FP_Q12]
ldp q14, q15, [x0, #CTX_FP_Q14]
ldp q16, q17, [x0, #CTX_FP_Q16]
ldp q18, q19, [x0, #CTX_FP_Q18]
ldp q20, q21, [x0, #CTX_FP_Q20]
ldp q22, q23, [x0, #CTX_FP_Q22]
ldp q24, q25, [x0, #CTX_FP_Q24]
ldp q26, q27, [x0, #CTX_FP_Q26]
ldp q28, q29, [x0, #CTX_FP_Q28]
ldp q30, q31, [x0, #CTX_FP_Q30]
ldr x9, [x0, #CTX_FP_FPSR]
msr fpsr, x9
ldr x10, [x0, #CTX_FP_FPCR]
msr fpcr, x10
#if CTX_INCLUDE_AARCH32_REGS
ldr x11, [x0, #CTX_FP_FPEXC32_EL2]
msr fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */
/*
 * No explicit ISB is required here as the ERET used to switch
 * to secure EL1 or the non-secure world covers it
*/
ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
/*
* Set the PSTATE bits not set when the exception was taken as
* described in the AArch64.TakeException() pseudocode function
* in ARM DDI 0487F.c page J1-7635 to a default value.
*/
.macro set_unset_pstate_bits
/*
* If Data Independent Timing (DIT) functionality is implemented,
* always enable DIT in EL3
*/
#if ENABLE_FEAT_DIT
mov x8, #DIT_BIT
msr DIT, x8
#endif /* ENABLE_FEAT_DIT */
.endm /* set_unset_pstate_bits */
/* ------------------------------------------------------------------
* The following macro is used to save and restore all the general
* purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU); if so, PMCCNTR_EL0
 * does not need to be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
* clobbers: x18
* ------------------------------------------------------------------
*/
.macro save_gp_pmcr_pauth_regs
stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
stp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
stp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
mrs x18, sp_el0
str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
/* ----------------------------------------------------------
* Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
* has failed.
*
* MDCR_EL3:
 * MCCD bit set: prohibits the Cycle Counter PMCCNTR_EL0 from
 * counting at EL3.
 * SCCD bit set: Secure Cycle Counter Disable; prohibits PMCCNTR_EL0
 * from counting in Secure state.
 * If these bits are not set, FEAT_PMUv3p5/7 is not implemented
 * and PMCR_EL0 should be saved in the non-secure context.
* ----------------------------------------------------------
*/
mov_imm x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
mrs x9, mdcr_el3
tst x9, x10
bne 1f
/* ----------------------------------------------------------
 * If control reaches here, the Secure Cycle Counter
 * (PMCCNTR_EL0) is not prohibited from counting at EL3 or in
 * Secure state. Hence, PMCR_EL0 must be saved before the world
 * switch.
* ----------------------------------------------------------
*/
mrs x9, pmcr_el0
/* Check caller's security state */
mrs x10, scr_el3
tst x10, #SCR_NS_BIT
beq 2f
/* Save PMCR_EL0 if called from Non-secure state */
str x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
/* Disable cycle counter when event counting is prohibited */
2: orr x9, x9, #PMCR_EL0_DP_BIT
msr pmcr_el0, x9
isb
1:
#if CTX_INCLUDE_PAUTH_REGS
/* ----------------------------------------------------------
* Save the ARMv8.3-PAuth keys as they are not banked
* by exception level
* ----------------------------------------------------------
*/
add x19, sp, #CTX_PAUTH_REGS_OFFSET
mrs x20, APIAKeyLo_EL1 /* x21:x20 = APIAKey */
mrs x21, APIAKeyHi_EL1
mrs x22, APIBKeyLo_EL1 /* x23:x22 = APIBKey */
mrs x23, APIBKeyHi_EL1
mrs x24, APDAKeyLo_EL1 /* x25:x24 = APDAKey */
mrs x25, APDAKeyHi_EL1
mrs x26, APDBKeyLo_EL1 /* x27:x26 = APDBKey */
mrs x27, APDBKeyHi_EL1
mrs x28, APGAKeyLo_EL1 /* x29:x28 = APGAKey */
mrs x29, APGAKeyHi_EL1
stp x20, x21, [x19, #CTX_PACIAKEY_LO]
stp x22, x23, [x19, #CTX_PACIBKEY_LO]
stp x24, x25, [x19, #CTX_PACDAKEY_LO]
stp x26, x27, [x19, #CTX_PACDBKEY_LO]
stp x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */
/* -----------------------------------------------------------------
* This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
* Save all the general purpose and ARMv8.3-PAuth (if enabled)
* registers.
* Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
* Architecture Reference Manual to a default value for EL3.
* clobbers: x17
* -----------------------------------------------------------------
*/
func prepare_el3_entry
save_gp_pmcr_pauth_regs
/*
 * Set the PSTATE bits not described in the AArch64.TakeException
* pseudocode to their default values.
*/
set_unset_pstate_bits
ret
endfunc prepare_el3_entry
/* ------------------------------------------------------------------
* This function restores ARMv8.3-PAuth (if enabled) and all general
* purpose registers except x30 from the CPU context.
* x30 register must be explicitly restored by the caller.
* ------------------------------------------------------------------
*/
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
/* Restore the ARMv8.3 PAuth keys */
add x10, sp, #CTX_PAUTH_REGS_OFFSET
ldp x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
ldp x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
ldp x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
ldp x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
ldp x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */
msr APIAKeyLo_EL1, x0
msr APIAKeyHi_EL1, x1
msr APIBKeyLo_EL1, x2
msr APIBKeyHi_EL1, x3
msr APDAKeyLo_EL1, x4
msr APDAKeyHi_EL1, x5
msr APDBKeyLo_EL1, x6
msr APDBKeyHi_EL1, x7
msr APGAKeyLo_EL1, x8
msr APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */
/* ----------------------------------------------------------
* Restore PMCR_EL0 when returning to Non-secure state if
* Secure Cycle Counter is not disabled in MDCR_EL3 when
* ARMv8.5-PMU is implemented.
* ----------------------------------------------------------
*/
mrs x0, scr_el3
tst x0, #SCR_NS_BIT
beq 2f
/* ----------------------------------------------------------
* Back to Non-secure state.
 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD to 1
 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
 * PMCR_EL0 should be restored from the non-secure context.
* ----------------------------------------------------------
*/
mov_imm x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
mrs x0, mdcr_el3
tst x0, x1
bne 2f
ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
msr pmcr_el0, x0
2:
ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
ldp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
ldr x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
msr sp_el0, x28
ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
ret
endfunc restore_gp_pmcr_pauth_regs
/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update them to disable the stage 1 and stage 2
 * page table walks.
*/
func save_and_update_ptw_el1_sys_regs
/* ----------------------------------------------------------
* Save only sctlr_el1 and tcr_el1 registers
* ----------------------------------------------------------
*/
mrs x29, sctlr_el1
str x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
mrs x29, tcr_el1
str x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]
/* ------------------------------------------------------------
 * The steps below must be followed in this order to disable the
 * page table walk for lower ELs (EL1 and EL0). The first step
 * ensures that the stage 1 page table walk is disabled, and the
 * second step ensures that the page table walker uses the
 * TCR_EL1.EPDx bits for address translation. The ISB ensures
 * that the CPU performs these two steps in order.
 *
 * 1. Set the TCR_EL1.EPDx bits to disable the stage 1 page
 *    table walk.
 * 2. Set the MMU enable bit to avoid an identity mapping via
 *    stage 2 and force TCR_EL1.EPDx to be used by the page
 *    table walker.
* ------------------------------------------------------------
*/
orr x29, x29, #(TCR_EPD0_BIT)
orr x29, x29, #(TCR_EPD1_BIT)
msr tcr_el1, x29
isb
mrs x29, sctlr_el1
orr x29, x29, #SCTLR_M_BIT
msr sctlr_el1, x29
isb
ret
endfunc save_and_update_ptw_el1_sys_regs
/* ------------------------------------------------------------------
* This routine assumes that the SP_EL3 is pointing to a valid
* context structure from where the gp regs and other special
* registers can be retrieved.
* ------------------------------------------------------------------
*/
func el3_exit
#if ENABLE_ASSERTIONS
/* el3_exit assumes SP_EL0 on entry */
mrs x17, spsel
cmp x17, #MODE_SP_EL0
ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */
/* ----------------------------------------------------------
 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
* will be used for handling the next SMC.
* Then switch to SP_EL3.
* ----------------------------------------------------------
*/
mov x17, sp
msr spsel, #MODE_SP_ELX
str x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
/* ----------------------------------------------------------
* Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
* ----------------------------------------------------------
*/
ldr x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
ldp x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
msr scr_el3, x18
msr spsr_el3, x16
msr elr_el3, x17
#if IMAGE_BL31
/* ----------------------------------------------------------
* Restore CPTR_EL3.
* ZCR is only restored if SVE is supported and enabled.
* Synchronization is required before zcr_el3 is addressed.
* ----------------------------------------------------------
*/
ldp x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
msr cptr_el3, x19
ands x19, x19, #CPTR_EZ_BIT
beq sve_not_enabled
isb
msr S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */
#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
/* ----------------------------------------------------------
* Restore mitigation state as it was on entry to EL3
* ----------------------------------------------------------
*/
ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
cbz x17, 1f
blr x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */
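/* ----------------------------------------------------------
 * Restore the SCTLR_EL1 and TCR_EL1 values saved by
 * save_and_update_ptw_el1_sys_regs (a no-op unless
 * ERRATA_SPECULATIVE_AT is enabled).
 * ----------------------------------------------------------
 */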
restore_ptw_el1_sys_regs
/* ----------------------------------------------------------
* Restore general purpose (including x30), PMCR_EL0 and
* ARMv8.3-PAuth registers.
* Exit EL3 via ERET to a lower exception level.
* ----------------------------------------------------------
*/
bl restore_gp_pmcr_pauth_regs
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
#if IMAGE_BL31 && RAS_EXTENSION
/* ----------------------------------------------------------
* Issue Error Synchronization Barrier to synchronize SErrors
* before exiting EL3. We're running with EAs unmasked, so
* any synchronized errors would be taken immediately;
 * therefore there is no need to inspect the DISR_EL1 register.
* ----------------------------------------------------------
*/
esb
#else
dsb sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */
#ifdef IMAGE_BL31
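/* Clear the EL3 state flag (CTX_IS_IN_EL3) before exiting EL3 */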
str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */
exception_return
endfunc el3_exit