/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

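/*
 * ESR_EL3 values for an SMC #0 trapped from AArch64 (EC = 0x17) and from
 * AArch32 (EC = 0x13), each with IL = 1 and a zero ISS.
 */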
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
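	/*
	 * As an illustrative sketch (not literal code), the fast path
	 * behaves as:
	 *
	 *    if (w0 == SMCCC_ARCH_WORKAROUND_2 && esr_el3 == <SMC #0>) {
	 *            ctx->cve_2018_3639_disable = (x1 == 0) ? disable_fn
	 *                                                   : NULL;
	 *            mitigation_enabled = (x1 != 0);
	 *            exception_return();
	 *    }
	 */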
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure SMC is coming from A64/A32 state on #0
		 * with W0 = SMCCC_ARCH_WORKAROUND_2
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
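		/*
		 * ccmp compares the expected and actual ESR_EL3 values only
		 * if the previous compare set EQ; otherwise it loads the
		 * immediate NZCV value #0, which reads back as NE.
		 */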
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function. Otherwise NULL is
		 * programmed in the CPU context, which results in the caller
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

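		/*
		 * Setting the DISABLE_LOAD_PASS_STORE bit enables the
		 * mitigation. The csel picks the cleared value when the
		 * caller requested disable (x1 == 0) and the set value
		 * otherwise.
		 */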
		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		exception_return /* exception_return contains ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

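/*
 * Replacement exception vectors: each lower-EL entry applies the
 * CVE-2018-3639 mitigation (or services the `SMCCC_ARCH_WORKAROUND_2`
 * fast path) before branching to the corresponding default runtime
 * vector.
 */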
vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

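/*
 * Each check_errata_* helper below loads x1 with the last affected
 * revision, encoded as (variant << 4) | revision, and tail-calls
 * cpu_rev_var_ls, which returns ERRATA_APPLIES in x0 when the value
 * passed in x0 is less than or equal to x1.
 */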
func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in; it is therefore reported as applicable in that case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Errata #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

/*
 * Check the <= r3p0 cases and branch to apply the workaround if the check
 * passes. #1262606 and #1275112 share the same revision range, so a single
 * check covers both.
 */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

/* Check the <= r4p0 case and branch past the workaround if the check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

/* --------------------------------------------------
 * Errata Workaround for A76 Erratum 1946160.
 * This applies to revisions r3p0 - r4p1 of A76.
 * It also exists in r0p0 - r2p0 but there is no fix
 * in those revisions.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

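	/*
	 * The writes below program implementation-defined system registers
	 * (the S3_6_C15_C8_n encodings) with the three patch sequences
	 * prescribed by the workaround for this erratum.
	 */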
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in; it is therefore reported as applicable in that case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
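	/* A non-zero SSBS field means the PE implements FEAT_SSBS. */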
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

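/*
 * Register the CPU ops. declare_cpu_ops_wa takes two extra hooks beyond
 * the regular CPU ops: the CVE-2017-5715 workaround function (not needed
 * here, hence CPU_NO_EXTRA1_FUNC) and the CVE-2018-3639 disable handler.
 */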
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn