/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif
	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif

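	/*
	 * Fine-grained trap registers (FEAT_FGT) and CNTPOFF_EL2
	 * (FEAT_ECV), both introduced with Armv8.6.
	 */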
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x13, HAFGRTR_EL2
	mrs	x14, HDFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HAFGRTR_EL2]

	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]

	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x12, contextidr_el2
	str	x12, [x0, #CTX_CONTEXTIDR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x13, sder32_el2
	str	x13, [x0, #CTX_SDER32_EL2]
#endif
	mrs	x14, ttbr1_el2
	mrs	x15, vdisr_el2
	stp	x14, x15, [x0, #CTX_TTBR1_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x9, vsesr_el2
	mrs	x10, vstcr_el2
	stp	x9, x10, [x0, #CTX_VSESR_EL2]

	mrs	x11, vsttbr_el2
	mrs	x12, TRFCR_EL2
	stp	x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif

#if ENABLE_FEAT_HCX
	mrs	x14, hcrx_el2
	str	x14, [x0, #CTX_HCRX_EL2]
#endif

	ret
endfunc el2_sysregs_context_save


/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif
	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x13, x14, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x13
	msr	HDFGRTR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10

	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldr	x12, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x12

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x13, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x13
#endif
	ldp	x14, x15, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x14
	msr	vdisr_el2, x15

#if CTX_INCLUDE_NEVE_REGS
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x9
	msr	vstcr_el2, x10

	ldp	x11, x12, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x11
	msr	TRFCR_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif

#if ENABLE_FEAT_HCX
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

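	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * instead saved by save_and_update_ptw_el1_sys_regs() (defined later
	 * in this file), so they are skipped here.
	 */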
#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

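	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * not restored here; they are expected to be restored on the
	 * el3_exit path via the restore_ptw_el1_sys_regs macro instead.
	 */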
#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter has not been
 * disabled in MDCR_EL3 when ARMv8.5-PMU is implemented; in that case
 * it saves PMCR_EL0 if called from the Non-secure state and then
 * disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we always save and restore these registers on
 * entry to and exit from EL3.
 * These are functions rather than macros to ensure their invocation
 * fits within the 32 instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
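	/* SP_EL0 of the interrupted context is saved alongside the GP registers */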
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 (when
 * returning to the Non-secure state) and all general purpose
 * registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to the Non-secure state if
	 * the Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to the Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that stage 1 and stage 2
 * page table walks are disabled for lower ELs.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for lower ELs (EL1 and EL0). The first step
	 * disables the stage 1 page table walk, and the second step
	 * forces the page table walker to use the TCR_EL1.EPDx bits
	 * for address translation. The ISB ensures that the CPU
	 * performs these two steps in order.
	 *
	 * 1. Set the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid an identity mapping via
	 *    stage 2 and force the page table walker to use the
	 *    TCR_EL1.EPDx bits.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the general purpose registers and other
 * special registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
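	/*
	 * Restore the SCTLR_EL1 and TCR_EL1 values saved by
	 * save_and_update_ptw_el1_sys_regs(); this macro is only expected
	 * to do work when ERRATA_SPECULATIVE_AT is enabled.
	 */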
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
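	/* Clear the flag that marks this CPU as executing in EL3, as we are about to leave EL3 */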
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit
