1 /*
2 * Copyright (c) 2016-2021, ARM Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7 #ifndef ARCH_HELPERS_H
8 #define ARCH_HELPERS_H
9
10 #include <cdefs.h>
11 #include <stdbool.h>
12 #include <stdint.h>
13 #include <string.h>
14
15 #include <arch.h>
16
17 /**********************************************************************
18 * Macros which create inline functions to read or write CPU system
19 * registers
20 *********************************************************************/
21
/*
 * Generate a writer for a 32-bit AArch32 coprocessor register.
 * The coprocessor name and opcode/CRn/CRm arguments are stringized
 * directly into the MCR instruction text.
 */
#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline void write_## _name(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Generate a reader for a 32-bit AArch32 coprocessor register, using
 * an MRC instruction with the stringized encoding arguments.
 */
#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2)	\
static inline u_register_t read_ ## _name(void)				\
{									\
	u_register_t v;							\
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v;							\
}
35
/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64-bit `mrrc` and `mcrr` accessors below (%Q selects the
 * low word of a 64-bit operand, %R the high word).
 */
40
/* Generate a writer for a 64-bit coprocessor register (MCRR). */
#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm)		\
static inline void write64_## _name(uint64_t v)				\
{									\
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

/* Generate a reader for a 64-bit coprocessor register (MRRC). */
#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm)		\
static inline uint64_t read64_## _name(void)				\
{	uint64_t v;							\
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v;							\
}
53
/* Generate a reader for a system register accessed with MRS. */
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name)		\
static inline u_register_t read_ ## _name(void)			\
{								\
	u_register_t v;						\
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v));	\
	return v;						\
}

/* Generate a writer for a system register accessed with MSR. */
#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name)		\
static inline void write_ ## _name(u_register_t v)		\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v));	\
}

/*
 * Generate a writer that takes a compile-time-constant value; the "i"
 * constraint embeds the immediate directly in the MSR instruction.
 */
#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name)	\
static inline void write_ ## _name(const u_register_t v)	\
{								\
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v));	\
}
73
/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) 				\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)			\
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) 			\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) 				\
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)		\
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/*
 * Define read & write function for a system register; the register
 * name used in the MRS/MSR instruction is the same as the accessor
 * suffix (e.g. read_cpsr/write_cpsr for CPSR).
 */
#define DEFINE_SYSREG_RW_FUNCS(_name)				\
	_DEFINE_SYSREG_READ_FUNC(_name, _name)			\
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)
104
105 /**********************************************************************
106 * Macros to create inline functions for tlbi operations
107 *********************************************************************/
108
/*
 * Generate a parameterless TLB-invalidate helper. The value written via
 * MCR is ignored by "all"-type TLBI operations, so a zero is supplied.
 */
#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void tlbi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Generate a parameterless branch-predictor-invalidate helper. */
#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2)		\
static inline void bpi##_op(void)					\
{									\
	u_register_t v = 0;						\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/*
 * Generate a TLB-invalidate helper taking an operand (e.g. a virtual
 * address for by-MVA operations).
 */
#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void tlbi##_op(u_register_t v)				\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}
128
/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...)				\
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...)			\
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...)				\
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
/*
 * Generate a data-cache maintenance helper taking an operand
 * (typically a virtual address).
 */
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2)	\
static inline void dc##_op(u_register_t v)			\
{									\
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...)			\
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)
153
154 /**********************************************************************
155 * Macros to create inline functions for system instructions
156 *********************************************************************/
/* Define function for simple system instruction (e.g. wfi, isb). */
#define DEFINE_SYSOP_FUNC(_op)				\
static inline void _op(void)				\
{							\
	__asm__ (#_op);					\
}


/*
 * Define function for system instruction with type specifier
 * (e.g. dsb sy). The "memory" clobber stops the compiler reordering
 * memory accesses across the barrier.
 */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type)		\
static inline void _op ## _type(void)			\
{							\
	__asm__ (#_op " " #_type : : : "memory");	\
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type)	\
static inline void _op ## _type(u_register_t v)		\
{							\
	__asm__ (#_op " " #_type ", %0" : : "r" (v));	\
}
178
/* Cache maintenance over a virtual address range (defined elsewhere). */
void flush_dcache_range(uintptr_t addr, size_t size);	/* clean + invalidate */
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);
bool is_dcache_enabled(void);

/* Data cache maintenance by set/way; op_type selects the operation. */
void dcsw_op_louis(u_register_t op_type);	/* to Level of Unification, Inner Shareable */
void dcsw_op_all(u_register_t op_type);		/* all levels */

/* MMU teardown helpers for the Secure world (defined elsewhere). */
void disable_mmu_secure(void);
void disable_mmu_icache_secure(void);
189
190 DEFINE_SYSOP_FUNC(wfi)
191 DEFINE_SYSOP_FUNC(wfe)
192 DEFINE_SYSOP_FUNC(sev)
193 DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
194 DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
195 DEFINE_SYSOP_TYPE_FUNC(dmb, st)
196
197 /* dmb ld is not valid for armv7/thumb machines */
198 #if ARM_ARCH_MAJOR != 7
199 DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
200 #endif
201
202 DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
203 DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
204 DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
205 DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
206 DEFINE_SYSOP_FUNC(isb)
207
208 void __dead2 smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
209 uint32_t r4, uint32_t r5, uint32_t r6, uint32_t r7);
210
211 DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)212 DEFINE_SYSREG_RW_FUNCS(cpsr)
213
/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
/* Identification and status registers (read-only accessors). */
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)

/* Control, translation and timer registers (read/write accessors). */
DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

/* Field extractors for a CNTP_CTL register value (shifts/masks in arch.h). */
#define get_cntp_ctl_enable(x)  (((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)   (((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x) (((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

/* In-place field setters/clearers; the argument is modified. */
#define set_cntp_ctl_enable(x)  ((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)   ((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)  ((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)   ((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))
268
/* GICv3 CPU interface registers (ICC_*); accessor names follow the
 * AArch64 _el1/_el2/_el3 convention for cross-architecture code. */
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

/* Debug and timer control registers. */
DEFINE_COPROCR_RW_FUNCS(sdcr, SDCR)
DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_READ_FUNC(pmcr, PMCR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

/* Coproc registers for 32bit AMU support */
DEFINE_COPROCR_READ_FUNC(amcfgr, AMCFGR)
DEFINE_COPROCR_READ_FUNC(amcgcr, AMCGCR)
DEFINE_COPROCR_RW_FUNCS(amcr, AMCR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

/* Coproc registers for 64bit AMU support */
DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)
320
/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
/*
 * On cores affected by these Cortex-A53 errata, clean-by-VA is promoted
 * to clean+invalidate-by-VA as the workaround.
 */
#if ERRATA_A53_819472 || ERRATA_A53_824069 || ERRATA_A53_827319
DEFINE_DCOP_PARAM_FUNC(cvac, DCCIMVAC)
#else
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)
#endif

/*
 * DynamIQ Shared Unit power management
 */
DEFINE_COPROCR_RW_FUNCS(clusterpwrdn, CLUSTERPWRDN)
351
/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif

/* True when SCR.NS is clear, i.e. executing in the Secure state. */
#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

/* Current-mode predicates derived from the CPSR mode field. */
#define IS_IN_HYP()       (GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()       (GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()       (GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()       IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) ||	\
		(IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))
372
/*
 * Map the current AArch32 execution state onto the equivalent AArch64
 * exception level: 3 for monitor/secure PL1, 2 for Hyp, 1 otherwise.
 */
static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	}
	if (IS_IN_EL2()) {
		return 2U;
	}
	return 1U;
}
383
/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

/* On AArch32 the CPSR plays the role of DAIF for mask-bit access. */
#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()
414
/* Helper functions to manipulate CPSR */

/* Unmask IRQ exceptions (clear CPSR.I via "cpsie i"). */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier will prevent the compiler from
	 * scheduling non-volatile memory access after the write to the
	 * register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, in the assumption
	 * that it is safe as the interrupts are disabled at the time it does
	 * that (according to program order). However, non-volatile accesses
	 * are not necessarily in program order relatively with volatile inline
	 * assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie	i");
	isb();
}
434
enable_serror(void)435 static inline void enable_serror(void)
436 {
437 COMPILER_BARRIER();
438 __asm__ volatile ("cpsie a");
439 isb();
440 }
441
enable_fiq(void)442 static inline void enable_fiq(void)
443 {
444 COMPILER_BARRIER();
445 __asm__ volatile ("cpsie f");
446 isb();
447 }
448
disable_irq(void)449 static inline void disable_irq(void)
450 {
451 COMPILER_BARRIER();
452 __asm__ volatile ("cpsid i");
453 isb();
454 }
455
disable_serror(void)456 static inline void disable_serror(void)
457 {
458 COMPILER_BARRIER();
459 __asm__ volatile ("cpsid a");
460 isb();
461 }
462
disable_fiq(void)463 static inline void disable_fiq(void)
464 {
465 COMPILER_BARRIER();
466 __asm__ volatile ("cpsid f");
467 isb();
468 }
469
470 #endif /* ARCH_HELPERS_H */
471