/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
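/* -----------------------------------------------------
 * Illustrative call site (a sketch only, not taken from
 * this file): with the base of a cpu_context_t in a
 * scratch register, the EL2 area is addressed via the
 * assumed CTX_EL2_SYSREGS_OFFSET constant from
 * context.h, e.g.
 *
 *	add	x0, x11, #CTX_EL2_SYSREGS_OFFSET
 *	bl	el2_sysregs_context_save
 * -----------------------------------------------------
 */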
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
#endif
	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPM1_EL2
	mrs	x14, MPAMVPM2_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x15, MPAMVPM3_EL2
	mrs	x16, MPAMVPM4_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM5_EL2
	mrs	x10, MPAMVPM6_EL2
	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x11, MPAMVPM7_EL2
	mrs	x12, MPAMVPMV_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x13, HAFGRTR_EL2
	mrs	x14, HDFGRTR_EL2
	stp	x13, x14, [x0, #CTX_HAFGRTR_EL2]

	mrs	x15, HDFGWTR_EL2
	mrs	x16, HFGITR_EL2
	stp	x15, x16, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGRTR_EL2
	mrs	x10, HFGWTR_EL2
	stp	x9, x10, [x0, #CTX_HFGRTR_EL2]

	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x12, contextidr_el2
	str	x12, [x0, #CTX_CONTEXTIDR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x13, sder32_el2
	str	x13, [x0, #CTX_SDER32_EL2]
#endif
	mrs	x14, ttbr1_el2
	mrs	x15, vdisr_el2
	stp	x14, x15, [x0, #CTX_TTBR1_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x9, vsesr_el2
	mrs	x10, vstcr_el2
	stp	x9, x10, [x0, #CTX_VSESR_EL2]

	mrs	x11, vsttbr_el2
	mrs	x12, TRFCR_EL2
	stp	x11, x12, [x0, #CTX_VSTTBR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to restore the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif
	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x13, x14, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x13
	msr	HDFGRTR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10

	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldr	x12, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x12

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x13, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x13
#endif
	ldp	x14, x15, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x14
	msr	vdisr_el2, x15

#if CTX_INCLUDE_NEVE_REGS
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif

	ldp	x9, x10, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x9
	msr	vstcr_el2, x10

	ldp	x11, x12, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x11
	msr	TRFCR_EL2, x12
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an 'el1_sys_regs'
 * structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
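/* ------------------------------------------------------------------
 * Illustrative call site (a sketch only): the EL1 area of the
 * context on SP_EL3 can be addressed with the CTX_EL1_SYSREGS_OFFSET
 * constant used elsewhere in this file, e.g.
 *
 *	add	x0, sp, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 * ------------------------------------------------------------------
 */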
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware does not currently use VFP registers
 * or set this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
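/* ------------------------------------------------------------------
 * Illustrative call site (a sketch only): assuming CTX_FPREGS_OFFSET
 * from context.h is the offset of the 'fp_regs' block within the
 * context on SP_EL3, a caller could do e.g.
 *
 *	add	x0, sp, #CTX_FPREGS_OFFSET
 *	bl	fpregs_context_save
 * ------------------------------------------------------------------
 */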
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware does not currently use VFP registers
 * or set this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is not disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if so, and if called from
 * Non-secure state, it saves PMCR_EL0 and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we always save and restore these registers on
 * entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
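/* ------------------------------------------------------------------
 * Note (illustrative, not a new contract): unlike the context save
 * helpers above, this function takes no pointer argument; it accesses
 * the context through SP, which is expected to hold the base of the
 * current context structure on SP_EL3, e.g.
 *
 *	bl	save_gp_pmcr_pauth_regs
 * ------------------------------------------------------------------
 */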
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 (when
 * appropriate) and all general purpose registers except x30 from the
 * CPU context. The x30 register must be explicitly restored by the
 * caller.
 * ------------------------------------------------------------------
 */
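/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, mirroring the pattern in el3_exit
 * below): the caller restores x30 itself after the call, e.g.
 *
 *	bl	restore_gp_pmcr_pauth_regs
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 * ------------------------------------------------------------------
 */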
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that stage 1 and stage 2
 * page table walks are disabled.
 */
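/*
 * Note (illustrative only): this function takes no argument; like the
 * GP register helpers above, it accesses the EL1 area of the context
 * through SP, and it clobbers x29, e.g.
 *
 *	bl	save_and_update_ptw_el1_sys_regs
 */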
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * forces the page table walker to honour the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP registers and other special registers
 * can be retrieved.
 * ------------------------------------------------------------------
 */
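/* ------------------------------------------------------------------
 * Illustrative usage (a sketch only): EL3 handlers typically reach
 * this routine with an unconditional branch once the return state in
 * the context has been populated, e.g.
 *
 *	b	el3_exit
 * ------------------------------------------------------------------
 */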
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif
#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit
