1 /*
2  * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #ifndef SMCCC_HELPERS_H
8 #define SMCCC_HELPERS_H
9 
10 #include <lib/smccc.h>
11 
/* These are the offsets of the registers within smc_ctx_t */
13 #define SMC_CTX_GPREG_R0	U(0x0)
14 #define SMC_CTX_GPREG_R1	U(0x4)
15 #define SMC_CTX_GPREG_R2	U(0x8)
16 #define SMC_CTX_GPREG_R3	U(0xC)
17 #define SMC_CTX_GPREG_R4	U(0x10)
18 #define SMC_CTX_GPREG_R5	U(0x14)
19 #define SMC_CTX_SP_USR		U(0x34)
20 #define SMC_CTX_SPSR_MON	U(0x78)
21 #define SMC_CTX_SP_MON		U(0x7C)
22 #define SMC_CTX_LR_MON		U(0x80)
23 #define SMC_CTX_SCR		U(0x84)
24 #define SMC_CTX_PMCR		U(0x88)
25 #define SMC_CTX_SIZE		U(0x90)
26 
27 #ifndef __ASSEMBLER__
28 
29 #include <stdint.h>
30 
31 #include <lib/cassert.h>
32 
33 /*
34  * The generic structure to save arguments and callee saved registers during
35  * an SMC. Also this structure is used to store the result return values after
36  * the completion of SMC service.
37  */
38 typedef struct smc_ctx {
39 	u_register_t r0;
40 	u_register_t r1;
41 	u_register_t r2;
42 	u_register_t r3;
43 	u_register_t r4;
44 	u_register_t r5;
45 	u_register_t r6;
46 	u_register_t r7;
47 	u_register_t r8;
48 	u_register_t r9;
49 	u_register_t r10;
50 	u_register_t r11;
51 	u_register_t r12;
52 	/* spsr_usr doesn't exist */
53 	u_register_t sp_usr;
54 	u_register_t lr_usr;
55 	u_register_t spsr_irq;
56 	u_register_t sp_irq;
57 	u_register_t lr_irq;
58 	u_register_t spsr_fiq;
59 	u_register_t sp_fiq;
60 	u_register_t lr_fiq;
61 	u_register_t spsr_svc;
62 	u_register_t sp_svc;
63 	u_register_t lr_svc;
64 	u_register_t spsr_abt;
65 	u_register_t sp_abt;
66 	u_register_t lr_abt;
67 	u_register_t spsr_und;
68 	u_register_t sp_und;
69 	u_register_t lr_und;
70 	u_register_t spsr_mon;
71 	/*
72 	 * `sp_mon` will point to the C runtime stack in monitor mode. But prior
73 	 * to exit from SMC, this will point to the `smc_ctx_t` so that
74 	 * on next entry due to SMC, the `smc_ctx_t` can be easily accessed.
75 	 */
76 	u_register_t sp_mon;
77 	u_register_t lr_mon;
78 	u_register_t scr;
79 	u_register_t pmcr;
80 	/*
81 	 * The workaround for CVE-2017-5715 requires storing information in
82 	 * the bottom 3 bits of the stack pointer.  Add a padding field to
83 	 * force the size of the struct to be a multiple of 8.
84 	 */
85 	u_register_t pad;
86 } smc_ctx_t __aligned(8);
87 
88 /*
89  * Compile time assertions related to the 'smc_context' structure to
90  * ensure that the assembler and the compiler view of the offsets of
91  * the structure members is the same.
92  */
93 CASSERT(SMC_CTX_GPREG_R0 == __builtin_offsetof(smc_ctx_t, r0), \
94 	assert_smc_ctx_greg_r0_offset_mismatch);
95 CASSERT(SMC_CTX_GPREG_R1 == __builtin_offsetof(smc_ctx_t, r1), \
96 	assert_smc_ctx_greg_r1_offset_mismatch);
97 CASSERT(SMC_CTX_GPREG_R2 == __builtin_offsetof(smc_ctx_t, r2), \
98 	assert_smc_ctx_greg_r2_offset_mismatch);
99 CASSERT(SMC_CTX_GPREG_R3 == __builtin_offsetof(smc_ctx_t, r3), \
100 	assert_smc_ctx_greg_r3_offset_mismatch);
101 CASSERT(SMC_CTX_GPREG_R4 == __builtin_offsetof(smc_ctx_t, r4), \
102 	assert_smc_ctx_greg_r4_offset_mismatch);
103 CASSERT(SMC_CTX_SP_USR == __builtin_offsetof(smc_ctx_t, sp_usr), \
104 	assert_smc_ctx_sp_usr_offset_mismatch);
105 CASSERT(SMC_CTX_LR_MON == __builtin_offsetof(smc_ctx_t, lr_mon), \
106 	assert_smc_ctx_lr_mon_offset_mismatch);
107 CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
108 	assert_smc_ctx_spsr_mon_offset_mismatch);
109 
110 CASSERT((sizeof(smc_ctx_t) & 0x7U) == 0U, assert_smc_ctx_not_aligned);
111 CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
112 
/* Convenience macros to return from an SMC handler (usage sketch below) */
114 #define SMC_RET0(_h) {				\
115 	return (uintptr_t)(_h);			\
116 }
117 #define SMC_RET1(_h, _r0) {			\
118 	((smc_ctx_t *)(_h))->r0 = (_r0);	\
119 	SMC_RET0(_h);				\
120 }
121 #define SMC_RET2(_h, _r0, _r1) {		\
122 	((smc_ctx_t *)(_h))->r1 = (_r1);	\
123 	SMC_RET1(_h, (_r0));			\
124 }
125 #define SMC_RET3(_h, _r0, _r1, _r2) {		\
126 	((smc_ctx_t *)(_h))->r2 = (_r2);	\
127 	SMC_RET2(_h, (_r0), (_r1));		\
128 }
129 #define SMC_RET4(_h, _r0, _r1, _r2, _r3) {	\
130 	((smc_ctx_t *)(_h))->r3 = (_r3);	\
131 	SMC_RET3(_h, (_r0), (_r1), (_r2));	\
132 }
133 #define SMC_RET5(_h, _r0, _r1, _r2, _r3, _r4) {	\
134 	((smc_ctx_t *)(_h))->r4 = (_r4);	\
135 	SMC_RET4(_h, (_r0), (_r1), (_r2), (_r3));	\
136 }
137 #define SMC_RET6(_h, _r0, _r1, _r2, _r3, _r4, _r5) {	\
138 	((smc_ctx_t *)(_h))->r5 = (_r5);	\
139 	SMC_RET5(_h, (_r0), (_r1), (_r2), (_r3), (_r4));	\
140 }
141 #define SMC_RET7(_h, _r0, _r1, _r2, _r3, _r4, _r5, _r6) {	\
142 	((smc_ctx_t *)(_h))->r6 = (_r6);	\
143 	SMC_RET6(_h, (_r0), (_r1), (_r2), (_r3), (_r4), (_r5));	\
144 }
145 #define SMC_RET8(_h, _r0, _r1, _r2, _r3, _r4, _r5, _r6, _r7) {	\
146 	((smc_ctx_t *)(_h))->r7 = (_r7);	\
147 	SMC_RET7(_h, (_r0), (_r1), (_r2), (_r3), (_r4), (_r5), (_r6));	\
148 }
149 
150 /*
151  * Helper macro to retrieve the SMC parameters from smc_ctx_t.
152  */
153 #define get_smc_params_from_ctx(_hdl, _r1, _r2, _r3, _r4) {	\
154 		_r1 = ((smc_ctx_t *)_hdl)->r1;		\
155 		_r2 = ((smc_ctx_t *)_hdl)->r2;		\
156 		_r3 = ((smc_ctx_t *)_hdl)->r3;		\
157 		_r4 = ((smc_ctx_t *)_hdl)->r4;		\
158 		}
159 
160 /* ------------------------------------------------------------------------
161  * Helper APIs for setting and retrieving appropriate `smc_ctx_t`.
162  * These functions need to implemented by the BL including this library.
163  * ------------------------------------------------------------------------
164  */
165 
166 /* Get the pointer to `smc_ctx_t` corresponding to the security state. */
167 void *smc_get_ctx(unsigned int security_state);
168 
169 /* Set the next `smc_ctx_t` corresponding to the security state. */
170 void smc_set_next_ctx(unsigned int security_state);
171 
172 /* Get the pointer to next `smc_ctx_t` already set by `smc_set_next_ctx()`. */
173 void *smc_get_next_ctx(void);
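
/*
 * Illustrative sketch (not part of this library) of one way a BL image
 * could back these APIs: a statically allocated `smc_ctx_t` per security
 * state, with `smc_set_next_ctx()` recording which one the next exception
 * return should use. All names and the storage scheme below are
 * hypothetical; an actual BL may index contexts differently (e.g. per CPU).
 *
 *	static smc_ctx_t bl_smc_ctx[2];		(indexed by security state)
 *	static unsigned int bl_next_ctx;
 *
 *	void *smc_get_ctx(unsigned int security_state)
 *	{
 *		return &bl_smc_ctx[security_state];
 *	}
 *
 *	void smc_set_next_ctx(unsigned int security_state)
 *	{
 *		bl_next_ctx = security_state;
 *	}
 *
 *	void *smc_get_next_ctx(void)
 *	{
 *		return &bl_smc_ctx[bl_next_ctx];
 *	}
 */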
174 
#endif /* __ASSEMBLER__ */
176 
177 #endif /* SMCCC_HELPERS_H */
178