1 #ifndef __ASM_ARM_PROCESSOR_H
2 #define __ASM_ARM_PROCESSOR_H
3 
4 #ifndef __ASSEMBLY__
5 #include <xen/types.h>
6 #endif
7 #include <public/arch-arm.h>
8 
/*
 * CTR Cache Type Register.
 * L1Ip is the Level 1 instruction-cache policy field; AIVIVT is the
 * ASID-tagged VIVT encoding.
 * NOTE(review): the mask is pre-shift (i.e. meant to be applied after
 * shifting the register right by CTR_L1Ip_SHIFT) — confirm against users.
 */
#define CTR_L1Ip_MASK       0x3
#define CTR_L1Ip_SHIFT      14
#define CTR_L1Ip_AIVIVT     0x1
13 
/*
 * MIDR Main ID Register.
 * Layout: Implementer[31:24] Variant[23:20] Architecture[19:16]
 *         PartNum[15:4] Revision[3:0].
 */
#define MIDR_REVISION_MASK      0xf
#define MIDR_REVISION(midr)     ((midr) & MIDR_REVISION_MASK)
/* Misspelled legacy name kept for existing callers; prefer MIDR_REVISION. */
#define MIDR_RESIVION(midr)     MIDR_REVISION(midr)
#define MIDR_PARTNUM_SHIFT      4
#define MIDR_PARTNUM_MASK       (0xfff << MIDR_PARTNUM_SHIFT)
#define MIDR_PARTNUM(midr) \
    (((midr) & MIDR_PARTNUM_MASK) >> MIDR_PARTNUM_SHIFT)
#define MIDR_ARCHITECTURE_SHIFT 16
#define MIDR_ARCHITECTURE_MASK  (0xf << MIDR_ARCHITECTURE_SHIFT)
#define MIDR_ARCHITECTURE(midr) \
    (((midr) & MIDR_ARCHITECTURE_MASK) >> MIDR_ARCHITECTURE_SHIFT)
#define MIDR_VARIANT_SHIFT      20
#define MIDR_VARIANT_MASK       (0xf << MIDR_VARIANT_SHIFT)
#define MIDR_VARIANT(midr) \
    (((midr) & MIDR_VARIANT_MASK) >> MIDR_VARIANT_SHIFT)
#define MIDR_IMPLEMENTOR_SHIFT  24
/*
 * Use an unsigned constant: 0xff << 24 would shift a set bit into the sign
 * bit of a (signed) int, which is undefined behaviour in C.
 */
#define MIDR_IMPLEMENTOR_MASK   (0xffU << MIDR_IMPLEMENTOR_SHIFT)
#define MIDR_IMPLEMENTOR(midr) \
    (((midr) & MIDR_IMPLEMENTOR_MASK) >> MIDR_IMPLEMENTOR_SHIFT)

/*
 * Build a canonical MIDR value from an implementer code and a part number,
 * with the architecture field set to 0xf ("architected via CPUID scheme")
 * and variant/revision zeroed.
 */
#define MIDR_CPU_MODEL(imp, partnum)            \
    (((imp)     << MIDR_IMPLEMENTOR_SHIFT) |    \
     (0xf       << MIDR_ARCHITECTURE_SHIFT) |   \
     ((partnum) << MIDR_PARTNUM_SHIFT))

#define MIDR_CPU_MODEL_MASK \
     (MIDR_IMPLEMENTOR_MASK | MIDR_PARTNUM_MASK | MIDR_ARCHITECTURE_MASK)

/*
 * Evaluate to non-zero when 'midr' matches CPU model 'model' and its
 * combined variant+revision bits lie within [rv_min, rv_max].
 */
#define MIDR_IS_CPU_MODEL_RANGE(midr, model, rv_min, rv_max)            \
({                                                                      \
        u32 _model = (midr) & MIDR_CPU_MODEL_MASK;                      \
        u32 _rv = (midr) & (MIDR_REVISION_MASK | MIDR_VARIANT_MASK);    \
                                                                        \
        _model == (model) && _rv >= (rv_min) && _rv <= (rv_max);        \
})
49 
/* ARM Ltd. implementer code, as found in MIDR bits [31:24]. */
#define ARM_CPU_IMP_ARM             0x41

/* Primary part numbers (MIDR bits [15:4]) for ARM Ltd. cores. */
#define ARM_CPU_PART_CORTEX_A12     0xC0D
#define ARM_CPU_PART_CORTEX_A17     0xC0E
#define ARM_CPU_PART_CORTEX_A15     0xC0F
#define ARM_CPU_PART_CORTEX_A53     0xD03
#define ARM_CPU_PART_CORTEX_A57     0xD07
#define ARM_CPU_PART_CORTEX_A72     0xD08
#define ARM_CPU_PART_CORTEX_A73     0xD09
#define ARM_CPU_PART_CORTEX_A75     0xD0A
#define ARM_CPU_PART_CORTEX_A76     0xD0B
#define ARM_CPU_PART_NEOVERSE_N1    0xD0C

/* Canonical MIDR values (implementer + architecture + part number). */
#define MIDR_CORTEX_A12 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A12)
#define MIDR_CORTEX_A17 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A17)
#define MIDR_CORTEX_A15 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A15)
#define MIDR_CORTEX_A53 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A53)
#define MIDR_CORTEX_A57 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A57)
#define MIDR_CORTEX_A72 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A72)
#define MIDR_CORTEX_A73 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A73)
#define MIDR_CORTEX_A75 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A75)
#define MIDR_CORTEX_A76 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_CORTEX_A76)
#define MIDR_NEOVERSE_N1 MIDR_CPU_MODEL(ARM_CPU_IMP_ARM, ARM_CPU_PART_NEOVERSE_N1)
73 
/* MPIDR Multiprocessor Affinity Register */
#define _MPIDR_UP           (30)    /* U bit: uniprocessor system */
#define MPIDR_UP            (_AC(1,U) << _MPIDR_UP)
#define _MPIDR_SMP          (31)    /* Multiprocessing-extensions format bit */
#define MPIDR_SMP           (_AC(1,U) << _MPIDR_SMP)
#define MPIDR_AFF0_SHIFT    (0)
#define MPIDR_AFF0_MASK     (_AC(0xff,U) << MPIDR_AFF0_SHIFT)
/* Mask covering only the affinity fields (Aff3 exists only on ARM64). */
#ifdef CONFIG_ARM_64
#define MPIDR_HWID_MASK     _AC(0xff00ffffff,UL)
#else
#define MPIDR_HWID_MASK     _AC(0xffffff,U)
#endif
#define MPIDR_INVALID       (~MPIDR_HWID_MASK)
#define MPIDR_LEVEL_BITS    (8)     /* Each affinity level is 8 bits wide */


/*
 * Macros to extract an affinity level from an MPIDR value
 * (imported from the Linux kernel).
 */

#define MPIDR_LEVEL_BITS_SHIFT  3
#define MPIDR_LEVEL_MASK        ((1 << MPIDR_LEVEL_BITS) - 1)

/*
 * Bit offset of affinity level 'level': 0, 8, 16 for levels 0-2 and
 * 32 for level 3 (the (1 << level) >> 1 trick maps 0,1,2,3 -> 0,1,2,4).
 */
#define MPIDR_LEVEL_SHIFT(level) \
         (((1 << level) >> 1) << MPIDR_LEVEL_BITS_SHIFT)

#define MPIDR_AFFINITY_LEVEL(mpidr, level) \
         ((mpidr >> MPIDR_LEVEL_SHIFT(level)) & MPIDR_LEVEL_MASK)

/* Mask keeping the affinity levels strictly above 'level'. */
#define AFFINITY_MASK(level)    ~((_AC(0x1,UL) << MPIDR_LEVEL_SHIFT(level)) - 1)
104 
/* TTBCR Translation Table Base Control Register */
#define TTBCR_EAE    _AC(0x80000000,U)  /* Extended Address Enable (LPAE) */
/*
 * TTBCR.N selects the split between TTBR0 and TTBR1; the suffix gives the
 * resulting TTBR0 translation table size.
 */
#define TTBCR_N_MASK _AC(0x07,U)
#define TTBCR_N_16KB _AC(0x00,U)
#define TTBCR_N_8KB  _AC(0x01,U)
#define TTBCR_N_4KB  _AC(0x02,U)
#define TTBCR_N_2KB  _AC(0x03,U)
#define TTBCR_N_1KB  _AC(0x04,U)

/*
 * TTBCR_PD(0|1) can be applied only if LPAE is disabled, i.e., TTBCR.EAE==0
 * (ARM DDI 0487B.a G6-5203 and ARM DDI 0406C.b B4-1722).
 */
#define TTBCR_PD0       (_AC(1,U)<<4)
#define TTBCR_PD1       (_AC(1,U)<<5)
120 
/* SCTLR System Control Register. */

/* Bits specific to SCTLR_EL1 for Arm32 */

#define SCTLR_A32_EL1_V     BIT(13, UL)     /* High exception vectors */

/* Common bits for SCTLR_ELx for Arm32 */

#define SCTLR_A32_ELx_TE    BIT(30, UL)     /* Thumb Exception enable */
#define SCTLR_A32_ELx_FI    BIT(21, UL)     /* Fast Interrupts configuration */

/* Common bits for SCTLR_ELx for Arm64 */
#define SCTLR_A64_ELx_SA    BIT(3, UL)      /* SP Alignment check */

/* Common bits for SCTLR_ELx on all architectures */
#define SCTLR_Axx_ELx_EE    BIT(25, UL)     /* Exception Endianness */
#define SCTLR_Axx_ELx_WXN   BIT(19, UL)     /* Write implies XN */
#define SCTLR_Axx_ELx_I     BIT(12, UL)     /* Instruction cache enable */
#define SCTLR_Axx_ELx_C     BIT(2, UL)      /* Data cache enable */
#define SCTLR_Axx_ELx_A     BIT(1, UL)      /* Alignment check enable */
#define SCTLR_Axx_ELx_M     BIT(0, UL)      /* MMU enable */

#ifdef CONFIG_ARM_32

/* Bits architecturally reserved-as-one in HSCTLR. */
#define HSCTLR_RES1     (BIT( 3, UL) | BIT( 4, UL) | BIT( 5, UL) |\
                         BIT( 6, UL) | BIT(11, UL) | BIT(16, UL) |\
                         BIT(18, UL) | BIT(22, UL) | BIT(23, UL) |\
                         BIT(28, UL) | BIT(29, UL))

/* Bits architecturally reserved-as-zero in HSCTLR. */
#define HSCTLR_RES0     (BIT(7, UL)  | BIT(8, UL)  | BIT(9, UL)  | BIT(10, UL) |\
                         BIT(13, UL) | BIT(14, UL) | BIT(15, UL) | BIT(17, UL) |\
                         BIT(20, UL) | BIT(24, UL) | BIT(26, UL) | BIT(27, UL) |\
                         BIT(31, UL))

/* Initial value for HSCTLR */
#define HSCTLR_SET      (HSCTLR_RES1    | SCTLR_Axx_ELx_A   | SCTLR_Axx_ELx_I)

/* Only used at pre-processing time... */
#define HSCTLR_CLEAR    (HSCTLR_RES0        | SCTLR_Axx_ELx_M   |\
                         SCTLR_Axx_ELx_C    | SCTLR_Axx_ELx_WXN |\
                         SCTLR_A32_ELx_FI   | SCTLR_Axx_ELx_EE  |\
                         SCTLR_A32_ELx_TE)

/* SET and CLEAR must partition the 32-bit register exactly. */
#if (HSCTLR_SET ^ HSCTLR_CLEAR) != 0xffffffffU
#error "Inconsistent HSCTLR set/clear bits"
#endif

#else

/* Bits architecturally reserved-as-one in SCTLR_EL2. */
#define SCTLR_EL2_RES1  (BIT( 4, UL) | BIT( 5, UL) | BIT(11, UL) |\
                         BIT(16, UL) | BIT(18, UL) | BIT(22, UL) |\
                         BIT(23, UL) | BIT(28, UL) | BIT(29, UL))

/* Bits architecturally reserved-as-zero in SCTLR_EL2 (incl. the top word). */
#define SCTLR_EL2_RES0  (BIT( 6, UL) | BIT( 7, UL) | BIT( 8, UL) |\
                         BIT( 9, UL) | BIT(10, UL) | BIT(13, UL) |\
                         BIT(14, UL) | BIT(15, UL) | BIT(17, UL) |\
                         BIT(20, UL) | BIT(21, UL) | BIT(24, UL) |\
                         BIT(26, UL) | BIT(27, UL) | BIT(30, UL) |\
                         BIT(31, UL) | (0xffffffffULL << 32))

/* Initial value for SCTLR_EL2 */
#define SCTLR_EL2_SET   (SCTLR_EL2_RES1     | SCTLR_A64_ELx_SA  |\
                         SCTLR_Axx_ELx_I)

/* Only used at pre-processing time... */
#define SCTLR_EL2_CLEAR (SCTLR_EL2_RES0     | SCTLR_Axx_ELx_M   |\
                         SCTLR_Axx_ELx_A    | SCTLR_Axx_ELx_C   |\
                         SCTLR_Axx_ELx_WXN  | SCTLR_Axx_ELx_EE)

/* SET and CLEAR must partition the 64-bit register exactly. */
#if (SCTLR_EL2_SET ^ SCTLR_EL2_CLEAR) != 0xffffffffffffffffUL
#error "Inconsistent SCTLR_EL2 set/clear bits"
#endif

#endif
195 
/* HCR Hyp Configuration Register (HCR on ARM32, HCR_EL2 on ARM64). */
#define HCR_RW          (_AC(1,UL)<<31) /* Register Width, ARM64 only */
#define HCR_TGE         (_AC(1,UL)<<27) /* Trap General Exceptions */
#define HCR_TVM         (_AC(1,UL)<<26) /* Trap Virtual Memory Controls */
#define HCR_TTLB        (_AC(1,UL)<<25) /* Trap TLB Maintenance Operations */
#define HCR_TPU         (_AC(1,UL)<<24) /* Trap Cache Maintenance Operations to PoU */
#define HCR_TPC         (_AC(1,UL)<<23) /* Trap Cache Maintenance Operations to PoC */
#define HCR_TSW         (_AC(1,UL)<<22) /* Trap Set/Way Cache Maintenance Operations */
#define HCR_TAC         (_AC(1,UL)<<21) /* Trap ACTLR Accesses */
#define HCR_TIDCP       (_AC(1,UL)<<20) /* Trap lockdown */
#define HCR_TSC         (_AC(1,UL)<<19) /* Trap SMC instruction */
#define HCR_TID3        (_AC(1,UL)<<18) /* Trap ID Register Group 3 */
#define HCR_TID2        (_AC(1,UL)<<17) /* Trap ID Register Group 2 */
#define HCR_TID1        (_AC(1,UL)<<16) /* Trap ID Register Group 1 */
#define HCR_TID0        (_AC(1,UL)<<15) /* Trap ID Register Group 0 */
#define HCR_TWE         (_AC(1,UL)<<14) /* Trap WFE instruction */
#define HCR_TWI         (_AC(1,UL)<<13) /* Trap WFI instruction */
#define HCR_DC          (_AC(1,UL)<<12) /* Default cacheable */
/* Barrier Shareability Upgrade field (bits [11:10]) and its encodings. */
#define HCR_BSU_MASK    (_AC(3,UL)<<10) /* Barrier Shareability Upgrade */
#define HCR_BSU_NONE     (_AC(0,UL)<<10)
#define HCR_BSU_INNER    (_AC(1,UL)<<10)
#define HCR_BSU_OUTER    (_AC(2,UL)<<10)
#define HCR_BSU_FULL     (_AC(3,UL)<<10)
#define HCR_FB          (_AC(1,UL)<<9) /* Force Broadcast of Cache/BP/TLB operations */
#define HCR_VA          (_AC(1,UL)<<8) /* Virtual Asynchronous Abort */
#define HCR_VI          (_AC(1,UL)<<7) /* Virtual IRQ */
#define HCR_VF          (_AC(1,UL)<<6) /* Virtual FIQ */
#define HCR_AMO         (_AC(1,UL)<<5) /* Override CPSR.A */
#define HCR_IMO         (_AC(1,UL)<<4) /* Override CPSR.I */
#define HCR_FMO         (_AC(1,UL)<<3) /* Override CPSR.F */
#define HCR_PTW         (_AC(1,UL)<<2) /* Protected Walk */
#define HCR_SWIO        (_AC(1,UL)<<1) /* Set/Way Invalidation Override */
#define HCR_VM          (_AC(1,UL)<<0) /* Virtual MMU Enable */
229 
/* TCR: Stage 1 Translation Control */

#define TCR_T0SZ_SHIFT  (0)
#define TCR_T1SZ_SHIFT  (16)
#define TCR_T0SZ(x)     ((x)<<TCR_T0SZ_SHIFT)

/*
 * According to ARM DDI 0487B.a, TCR_EL1.{T0SZ,T1SZ} (AArch64, page D7-2480)
 * comprises 6 bits and TTBCR.{T0SZ,T1SZ} (AArch32, page G6-5204) comprises 3
 * bits following another 3 bits for RES0. Thus, the mask for both registers
 * should be 0x3f.
 */
#define TCR_SZ_MASK     (_AC(0x3f,UL))

/* EPDx: disable translation table walks using TTBR0/TTBR1. */
#define TCR_EPD0        (_AC(0x1,UL)<<7)
#define TCR_EPD1        (_AC(0x1,UL)<<23)

/* Inner cacheability attributes for TTBR0 walks. */
#define TCR_IRGN0_NC    (_AC(0x0,UL)<<8)
#define TCR_IRGN0_WBWA  (_AC(0x1,UL)<<8)
#define TCR_IRGN0_WT    (_AC(0x2,UL)<<8)
#define TCR_IRGN0_WB    (_AC(0x3,UL)<<8)

/* Outer cacheability attributes for TTBR0 walks. */
#define TCR_ORGN0_NC    (_AC(0x0,UL)<<10)
#define TCR_ORGN0_WBWA  (_AC(0x1,UL)<<10)
#define TCR_ORGN0_WT    (_AC(0x2,UL)<<10)
#define TCR_ORGN0_WB    (_AC(0x3,UL)<<10)

/* Shareability attributes for TTBR0 walks. */
#define TCR_SH0_NS      (_AC(0x0,UL)<<12)
#define TCR_SH0_OS      (_AC(0x2,UL)<<12)
#define TCR_SH0_IS      (_AC(0x3,UL)<<12)

/* Note that the fields TCR_EL1.{TG0,TG1} are not available on AArch32. */
#define TCR_TG0_SHIFT   (14)
#define TCR_TG0_MASK    (_AC(0x3,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_4K      (_AC(0x0,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_64K     (_AC(0x1,UL)<<TCR_TG0_SHIFT)
#define TCR_TG0_16K     (_AC(0x2,UL)<<TCR_TG0_SHIFT)

/* Note that the field TCR_EL2.TG1 exists only if HCR_EL2.E2H==1. */
#define TCR_EL1_TG1_SHIFT   (30)
#define TCR_EL1_TG1_MASK    (_AC(0x3,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_16K     (_AC(0x1,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_4K      (_AC(0x2,UL)<<TCR_EL1_TG1_SHIFT)
#define TCR_EL1_TG1_64K     (_AC(0x3,UL)<<TCR_EL1_TG1_SHIFT)

/*
 * Note that the field TCR_EL1.IPS is not available on AArch32. Also, the field
 * TCR_EL2.IPS exists only if HCR_EL2.E2H==1.
 */
#define TCR_EL1_IPS_SHIFT   (32)
#define TCR_EL1_IPS_MASK    (_AC(0x7,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_32_BIT  (_AC(0x0,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_36_BIT  (_AC(0x1,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_40_BIT  (_AC(0x2,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_42_BIT  (_AC(0x3,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_44_BIT  (_AC(0x4,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_48_BIT  (_AC(0x5,ULL)<<TCR_EL1_IPS_SHIFT)
#define TCR_EL1_IPS_52_BIT  (_AC(0x6,ULL)<<TCR_EL1_IPS_SHIFT)

/*
 * The following values correspond to the bit masks represented by
 * TCR_EL1_IPS_XX_BIT defines.
 */
#define TCR_EL1_IPS_32_BIT_VAL  (32)
#define TCR_EL1_IPS_36_BIT_VAL  (36)
#define TCR_EL1_IPS_40_BIT_VAL  (40)
#define TCR_EL1_IPS_42_BIT_VAL  (42)
#define TCR_EL1_IPS_44_BIT_VAL  (44)
#define TCR_EL1_IPS_48_BIT_VAL  (48)
#define TCR_EL1_IPS_52_BIT_VAL  (52)
#define TCR_EL1_IPS_MIN_VAL     (25)

/* Note that the fields TCR_EL2.TBI(0|1) exist only if HCR_EL2.E2H==1. */
#define TCR_EL1_TBI0    (_AC(0x1,ULL)<<37)
#define TCR_EL1_TBI1    (_AC(0x1,ULL)<<38)

#ifdef CONFIG_ARM_64

/* Physical address Size field of TCR_EL2 (bits [18:16]). */
#define TCR_PS(x)       ((x)<<16)
#define TCR_TBI         (_AC(0x1,UL)<<20)   /* Top Byte Ignored */

/* Bits reserved-as-one in TCR_EL2 (when HCR_EL2.E2H==0). */
#define TCR_RES1        (_AC(1,UL)<<31|_AC(1,UL)<<23)

#else

/* Bit reserved-as-one in the 32-bit HTCR. */
#define TCR_RES1        (_AC(1,UL)<<31)

#endif
318 
/* VTCR: Stage 2 Translation Control */

/* Size offset of the stage 2 input address region (T0SZ field). */
#define VTCR_T0SZ(x)    ((x)<<0)

/* Starting level of the stage 2 translation table walk (SL0 field). */
#define VTCR_SL0(x)     ((x)<<6)

/* Inner cacheability attributes for stage 2 table walks. */
#define VTCR_IRGN0_NC   (_AC(0x0,UL)<<8)
#define VTCR_IRGN0_WBWA (_AC(0x1,UL)<<8)
#define VTCR_IRGN0_WT   (_AC(0x2,UL)<<8)
#define VTCR_IRGN0_WB   (_AC(0x3,UL)<<8)

/* Outer cacheability attributes for stage 2 table walks. */
#define VTCR_ORGN0_NC   (_AC(0x0,UL)<<10)
#define VTCR_ORGN0_WBWA (_AC(0x1,UL)<<10)
#define VTCR_ORGN0_WT   (_AC(0x2,UL)<<10)
#define VTCR_ORGN0_WB   (_AC(0x3,UL)<<10)

/* Shareability attributes for stage 2 table walks. */
#define VTCR_SH0_NS     (_AC(0x0,UL)<<12)
#define VTCR_SH0_OS     (_AC(0x2,UL)<<12)
#define VTCR_SH0_IS     (_AC(0x3,UL)<<12)

#ifdef CONFIG_ARM_64

/* Stage 2 granule size (TG0 field). */
#define VTCR_TG0_4K     (_AC(0x0,UL)<<14)
#define VTCR_TG0_64K    (_AC(0x1,UL)<<14)
#define VTCR_TG0_16K    (_AC(0x2,UL)<<14)

/* Physical address Size for the second stage of translation (PS field). */
#define VTCR_PS(x)      ((x)<<16)

/* VMID Size (0: 8-bit, 1: 16-bit). Whitespace normalized (stray TAB). */
#define VTCR_VS         (_AC(0x1,UL)<<19)

#endif

/* Bit reserved-as-one in VTCR. */
#define VTCR_RES1       (_AC(1,UL)<<31)
352 
/* HCPTR Hyp. Coprocessor Trap Register */
#define HCPTR_TAM       ((_AC(1,U)<<30))        /* Trap Activity Monitor accesses */
#define HCPTR_TTA       ((_AC(1,U)<<20))        /* Trap trace registers */
#define HCPTR_CP(x)     ((_AC(1,U)<<(x)))       /* Trap Coprocessor x */
#define HCPTR_CP_MASK   ((_AC(1,U)<<14)-1)      /* All of CP0..CP13 */

/* HSTR Hyp. System Trap Register */
#define HSTR_T(x)       ((_AC(1,U)<<(x)))       /* Trap Cp15 c<x> */

/* HDCR Hyp. Debug Configuration Register */
#define HDCR_TDRA       (_AC(1,U)<<11)          /* Trap Debug ROM access */
#define HDCR_TDOSA      (_AC(1,U)<<10)          /* Trap Debug-OS-related register access */
#define HDCR_TDA        (_AC(1,U)<<9)           /* Trap Debug Access */
#define HDCR_TDE        (_AC(1,U)<<8)           /* Route Soft Debug exceptions from EL0/EL1 to EL2 */
#define HDCR_TPM        (_AC(1,U)<<6)           /* Trap Performance Monitors accesses */
#define HDCR_TPMCR      (_AC(1,U)<<5)           /* Trap PMCR accesses */
369 
/* Exception Class (EC) encodings of the hypervisor syndrome register. */
#define HSR_EC_SHIFT                26

#define HSR_EC_UNKNOWN              0x00
#define HSR_EC_WFI_WFE              0x01
#define HSR_EC_CP15_32              0x03
#define HSR_EC_CP15_64              0x04
#define HSR_EC_CP14_32              0x05        /* Trapped MCR or MRC access to CP14 */
#define HSR_EC_CP14_DBG             0x06        /* Trapped LDC/STC access to CP14 (only for debug registers) */
#define HSR_EC_CP                   0x07        /* HCPTR-trapped access to CP0-CP13 */
#define HSR_EC_CP10                 0x08
#define HSR_EC_JAZELLE              0x09
#define HSR_EC_BXJ                  0x0a
#define HSR_EC_CP14_64              0x0c
#define HSR_EC_SVC32                0x11
#define HSR_EC_HVC32                0x12
#define HSR_EC_SMC32                0x13
#ifdef CONFIG_ARM_64
#define HSR_EC_SVC64                0x15
#define HSR_EC_HVC64                0x16
#define HSR_EC_SMC64                0x17
#define HSR_EC_SYSREG               0x18        /* Trapped MSR/MRS/system instruction */
#endif
#define HSR_EC_INSTR_ABORT_LOWER_EL 0x20
#define HSR_EC_INSTR_ABORT_CURR_EL  0x21
#define HSR_EC_DATA_ABORT_LOWER_EL  0x24
#define HSR_EC_DATA_ABORT_CURR_EL   0x25
#ifdef CONFIG_ARM_64
#define HSR_EC_BRK                  0x3c        /* AArch64 BRK instruction */
#endif
399 
/* FSR format, common */
#define FSR_LPAE                (_AC(1,UL)<<9)  /* Long-descriptor (LPAE) format in use */
/* FSR short format */
#define FSRS_FS_DEBUG           (_AC(0,UL)<<10|_AC(0x2,UL)<<0)
/* FSR long format */
#define FSRL_STATUS_DEBUG       (_AC(0x22,UL)<<0)

#ifdef CONFIG_ARM_64
/* VMID width support (presumably ID_AA64MMFR1_EL1.VMIDBits encodings — verify). */
#define MM64_VMID_8_BITS_SUPPORT    0x0
#define MM64_VMID_16_BITS_SUPPORT   0x2
#endif
411 
#ifndef __ASSEMBLY__

/*
 * Per-logical-CPU hardware identifier table (presumably the MPIDR affinity
 * value of each CPU — confirm against the definition site).
 */
extern register_t __cpu_logical_map[];
#define cpu_logical_map(cpu) __cpu_logical_map[cpu]

#endif
418 
/* Physical Address Register */
#define PAR_F           (_AC(1,U)<<0)     /* Translation aborted */

/* .... If F == 1 */
#define PAR_FSC_SHIFT   (1)
#define PAR_FSC_MASK    (_AC(0x3f,U)<<PAR_FSC_SHIFT)    /* Fault Status Code */
#define PAR_STAGE21     (_AC(1,U)<<8)     /* Stage 2 Fault During Stage 1 Walk */
#define PAR_STAGE2      (_AC(1,U)<<9)     /* Stage 2 Fault */

/* If F == 0 */
#define PAR_MAIR_SHIFT  56                       /* Memory Attributes */
#define PAR_MAIR_MASK   (0xffLL<<PAR_MAIR_SHIFT)
#define PAR_NS          (_AC(1,U)<<9)                   /* Non-Secure */
#define PAR_SH_SHIFT    7                        /* Shareability */
#define PAR_SH_MASK     (_AC(3,U)<<PAR_SH_SHIFT)
434 
/* Fault Status Register */
/*
 * 543210 BIT
 * 00XXLL -- XX Fault Level LL
 * ..01LL -- Translation Fault LL
 * ..10LL -- Access Fault LL
 * ..11LL -- Permission Fault LL
 * 01xxxx -- Abort/Parity
 * 10xxxx -- Other
 * 11xxxx -- Implementation Defined
 */
#define FSC_TYPE_MASK (_AC(0x3,U)<<4)
#define FSC_TYPE_FAULT (_AC(0x00,U)<<4)
#define FSC_TYPE_ABT   (_AC(0x01,U)<<4)
#define FSC_TYPE_OTH   (_AC(0x02,U)<<4)
#define FSC_TYPE_IMPL  (_AC(0x03,U)<<4)

#define FSC_FLT_TRANS  (0x04)
#define FSC_FLT_ACCESS (0x08)
#define FSC_FLT_PERM   (0x0c)
#define FSC_SEA        (0x10) /* Synchronous External Abort */
#define FSC_SPE        (0x18) /* Memory Access Synchronous Parity Error */
#define FSC_APE        (0x11) /* Memory Access Asynchronous Parity Error */
#define FSC_SEATT      (0x14) /* Sync. Ext. Abort Translation Table */
#define FSC_SPETT      (0x1c) /* Sync. Parity. Error Translation Table */
#define FSC_AF         (0x21) /* Alignment Fault */
#define FSC_DE         (0x22) /* Debug Event */
#define FSC_LKD        (0x34) /* Lockdown Abort */
#define FSC_CPR        (0x3a) /* Coprocessor Abort */

/* Lookup level of the fault (bottom two bits of the status code). */
#define FSC_LL_MASK    (_AC(0x03,U)<<0)
466 
/* HPFAR_EL2: Hypervisor IPA Fault Address Register */
/* Mask of the FIPA field (the faulting IPA is held shifted right by 12). */
#ifdef CONFIG_ARM_64
#define HPFAR_MASK	GENMASK(39, 4)
#else
#define HPFAR_MASK	GENMASK(31, 4)
#endif

/* Time counter hypervisor control register */
#define CNTHCTL_EL2_EL1PCTEN (1u<<0) /* Kernel/user access to physical counter */
#define CNTHCTL_EL2_EL1PCEN  (1u<<1) /* Kernel/user access to CNTP timer regs */

/* Time counter kernel control register */
#define CNTKCTL_EL1_EL0PCTEN (1u<<0) /* Expose phys counters to EL0 */
#define CNTKCTL_EL1_EL0VCTEN (1u<<1) /* Expose virt counters to EL0 */
#define CNTKCTL_EL1_EL0VTEN  (1u<<8) /* Expose virt timer registers to EL0 */
#define CNTKCTL_EL1_EL0PTEN  (1u<<9) /* Expose phys timer registers to EL0 */

/* Timer control registers */
#define CNTx_CTL_ENABLE   (1u<<0)  /* Enable timer */
#define CNTx_CTL_MASK     (1u<<1)  /* Mask IRQ */
#define CNTx_CTL_PENDING  (1u<<2)  /* IRQ pending */

/* Exception Vector offsets */
/* ... ARM32 */
#define VECTOR32_RST  0
#define VECTOR32_UND  4
#define VECTOR32_SVC  8
#define VECTOR32_PABT 12
#define VECTOR32_DABT 16
/* ... ARM64: base of each vector group ... */
#define VECTOR64_CURRENT_SP0_BASE  0x000
#define VECTOR64_CURRENT_SPx_BASE  0x200
#define VECTOR64_LOWER64_BASE      0x400
#define VECTOR64_LOWER32_BASE      0x600

/* ... and the offset of each entry within a group. */
#define VECTOR64_SYNC_OFFSET       0x000
#define VECTOR64_IRQ_OFFSET        0x080
#define VECTOR64_FIQ_OFFSET        0x100
#define VECTOR64_ERROR_OFFSET      0x180
506 
507 
508 #if defined(CONFIG_ARM_32)
509 # include <asm/arm32/processor.h>
510 #elif defined(CONFIG_ARM_64)
511 # include <asm/arm64/processor.h>
512 #else
513 # error "unknown ARM variant"
514 #endif
515 
#ifndef __ASSEMBLY__

/* Decode and report a faulting PAR value, then panic. */
void panic_PAR(uint64_t par);

void show_execution_state(const struct cpu_user_regs *regs);
void show_registers(const struct cpu_user_regs *regs);
//#define dump_execution_state() run_in_exception_handler(show_execution_state)
#define dump_execution_state() WARN()

#define cpu_relax() barrier() /* Could yield? */

/* All a bit UP for the moment */
#define cpu_to_core(_cpu)   (0)
#define cpu_to_socket(_cpu) (0)

struct vcpu;

/* Copy register state between the hypervisor and the guest-visible layout. */
void vcpu_regs_hyp_to_user(const struct vcpu *vcpu,
                           struct vcpu_guest_core_regs *regs);
void vcpu_regs_user_to_hyp(struct vcpu *vcpu,
                           const struct vcpu_guest_core_regs *regs);

void do_trap_hyp_serror(struct cpu_user_regs *regs);

void do_trap_guest_serror(struct cpu_user_regs *regs);

register_t get_default_hcr_flags(void);

/*
 * Synchronize SErrors (dsb sy + isb) unless the cpufeature 'feat' is
 * enabled, in which case the alternative patches the barriers into NOPs.
 * This relies on SErrors being currently unmasked (asserted below).
 */
#define SYNCHRONIZE_SERROR(feat)                                  \
    do {                                                          \
        ASSERT(local_abort_is_enabled());                         \
        asm volatile(ALTERNATIVE("dsb sy; isb",                   \
                                 "nop; nop", feat)                \
                                 : : : "memory");                 \
    } while (0)

/*
 * Clear/Set flags in HCR_EL2 for a given vCPU. It only supports the current
 * vCPU for now.
 */
#define vcpu_hcr_clear_flags(v, flags)              \
    do {                                            \
        ASSERT((v) == current);                     \
        (v)->arch.hcr_el2 &= ~(flags);              \
        WRITE_SYSREG((v)->arch.hcr_el2, HCR_EL2);   \
    } while (0)

#define vcpu_hcr_set_flags(v, flags)                \
    do {                                            \
        ASSERT((v) == current);                     \
        (v)->arch.hcr_el2 |= (flags);               \
        WRITE_SYSREG((v)->arch.hcr_el2, HCR_EL2);   \
    } while (0)

#endif /* __ASSEMBLY__ */
571 
572 #endif /* __ASSEMBLY__ */
573 #endif /* __ASM_ARM_PROCESSOR_H */
574 /*
575  * Local variables:
576  * mode: C
577  * c-file-style: "BSD"
578  * c-basic-offset: 4
579  * indent-tabs-mode: nil
580  * End:
581  */
582