/*
 * Copyright (c) 2017-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group0_cnt_write_internal
	.globl	amu_group1_cnt_read_internal
	.globl	amu_group1_cnt_write_internal
	.globl	amu_group1_set_evtype_internal

	/* FEAT_AMUv1p1 virtualisation offset register functions */
	.globl	amu_group0_voffset_read_internal
	.globl	amu_group0_voffset_write_internal
	.globl	amu_group1_voffset_read_internal
	.globl	amu_group1_voffset_write_internal

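/*
 * The `read`/`write` helpers used by the register tables below are assumed
 * to expand to an mrs/ret or msr/ret pair (8 bytes), preceded by a 4-byte
 * "bti j" landing pad when ENABLE_BTI is set, matching the branch-offset
 * arithmetic in each function. Sketch definitions along these lines are
 * assumed if they are not already provided by an included header:
 */
	.macro	read reg
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	.macro	write reg
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
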
/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
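	/*
	 * For example, idx = 2 selects the entry at 1f + 16 (two 8-byte
	 * entries), or 1f + 24 when ENABLE_BTI adds a 4-byte "bti j"
	 * landing pad to each entry.
	 */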
	br	x1

1:	read	AMEVCNTR00_EL0		/* index 0 */
	read	AMEVCNTR01_EL0		/* index 1 */
	read	AMEVCNTR02_EL0		/* index 2 */
	read	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_read_internal
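
/*
 * Illustrative call sequence (hypothetical caller, not part of this file),
 * assuming the standard AAPCS64 convention implied by the C prototype above:
 *
 *	mov	x0, #2				// idx = 2
 *	bl	amu_group0_cnt_read_internal	// counter value returned in x0
 */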

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group0_cnt_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTR00_EL0		/* index 0 */
	write	AMEVCNTR01_EL0		/* index 1 */
	write	AMEVCNTR02_EL0		/* index 2 */
	write	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_write_internal

#if ENABLE_AMU_AUXILIARY_COUNTERS
/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR10_EL0		/* index 0 */
	read	AMEVCNTR11_EL0		/* index 1 */
	read	AMEVCNTR12_EL0		/* index 2 */
	read	AMEVCNTR13_EL0		/* index 3 */
	read	AMEVCNTR14_EL0		/* index 4 */
	read	AMEVCNTR15_EL0		/* index 5 */
	read	AMEVCNTR16_EL0		/* index 6 */
	read	AMEVCNTR17_EL0		/* index 7 */
	read	AMEVCNTR18_EL0		/* index 8 */
	read	AMEVCNTR19_EL0		/* index 9 */
	read	AMEVCNTR1A_EL0		/* index 10 */
	read	AMEVCNTR1B_EL0		/* index 11 */
	read	AMEVCNTR1C_EL0		/* index 12 */
	read	AMEVCNTR1D_EL0		/* index 13 */
	read	AMEVCNTR1E_EL0		/* index 14 */
	read	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group1_cnt_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTR10_EL0		/* index 0 */
	write	AMEVCNTR11_EL0		/* index 1 */
	write	AMEVCNTR12_EL0		/* index 2 */
	write	AMEVCNTR13_EL0		/* index 3 */
	write	AMEVCNTR14_EL0		/* index 4 */
	write	AMEVCNTR15_EL0		/* index 5 */
	write	AMEVCNTR16_EL0		/* index 6 */
	write	AMEVCNTR17_EL0		/* index 7 */
	write	AMEVCNTR18_EL0		/* index 8 */
	write	AMEVCNTR19_EL0		/* index 9 */
	write	AMEVCNTR1A_EL0		/* index 10 */
	write	AMEVCNTR1B_EL0		/* index 11 */
	write	AMEVCNTR1C_EL0		/* index 12 */
	write	AMEVCNTR1D_EL0		/* index 13 */
	write	AMEVCNTR1E_EL0		/* index 14 */
	write	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_set_evtype_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)

	/* `val` should be in the range [0, 65535] */
	tst	x1, #~0xFFFF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVTYPER10_EL0		/* index 0 */
	write	AMEVTYPER11_EL0		/* index 1 */
	write	AMEVTYPER12_EL0		/* index 2 */
	write	AMEVTYPER13_EL0		/* index 3 */
	write	AMEVTYPER14_EL0		/* index 4 */
	write	AMEVTYPER15_EL0		/* index 5 */
	write	AMEVTYPER16_EL0		/* index 6 */
	write	AMEVTYPER17_EL0		/* index 7 */
	write	AMEVTYPER18_EL0		/* index 8 */
	write	AMEVTYPER19_EL0		/* index 9 */
	write	AMEVTYPER1A_EL0		/* index 10 */
	write	AMEVTYPER1B_EL0		/* index 11 */
	write	AMEVTYPER1C_EL0		/* index 12 */
	write	AMEVTYPER1D_EL0		/* index 13 */
	write	AMEVTYPER1E_EL0		/* index 14 */
	write	AMEVTYPER1F_EL0		/* index 15 */
endfunc amu_group1_set_evtype_internal
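
/*
 * Illustrative call sequence (hypothetical caller; the event number below is
 * a placeholder, since valid auxiliary event types are implementation
 * defined):
 *
 *	mov	x0, #0				// auxiliary counter index 0
 *	mov	x1, #0x10			// placeholder event type value
 *	bl	amu_group1_set_evtype_internal
 */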
#endif

/*
 * Accessor functions for virtual offset registers added with FEAT_AMUv1p1
 */

/*
 * uint64_t amu_group0_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group0_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4
#endif
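	/*
	 * The .skip directives above reserve the 8-byte (12-byte with
	 * ENABLE_BTI) slot for index 1, so the fixed entry-size arithmetic
	 * above still lands on the entries for indices 2 and 3.
	 */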
	read	AMEVCNTVOFF02_EL2	/* index 2 */
	read	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_read_internal

/*
 * void amu_group0_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group0_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4
#endif
	write	AMEVCNTVOFF02_EL2	/* index 2 */
	write	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_write_internal

#if ENABLE_AMU_AUXILIARY_COUNTERS
/*
 * uint64_t amu_group1_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group1_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTVOFF10_EL2	/* index 0 */
	read	AMEVCNTVOFF11_EL2	/* index 1 */
	read	AMEVCNTVOFF12_EL2	/* index 2 */
	read	AMEVCNTVOFF13_EL2	/* index 3 */
	read	AMEVCNTVOFF14_EL2	/* index 4 */
	read	AMEVCNTVOFF15_EL2	/* index 5 */
	read	AMEVCNTVOFF16_EL2	/* index 6 */
	read	AMEVCNTVOFF17_EL2	/* index 7 */
	read	AMEVCNTVOFF18_EL2	/* index 8 */
	read	AMEVCNTVOFF19_EL2	/* index 9 */
	read	AMEVCNTVOFF1A_EL2	/* index 10 */
	read	AMEVCNTVOFF1B_EL2	/* index 11 */
	read	AMEVCNTVOFF1C_EL2	/* index 12 */
	read	AMEVCNTVOFF1D_EL2	/* index 13 */
	read	AMEVCNTVOFF1E_EL2	/* index 14 */
	read	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_read_internal

/*
 * void amu_group1_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group1_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF10_EL2	/* index 0 */
	write	AMEVCNTVOFF11_EL2	/* index 1 */
	write	AMEVCNTVOFF12_EL2	/* index 2 */
	write	AMEVCNTVOFF13_EL2	/* index 3 */
	write	AMEVCNTVOFF14_EL2	/* index 4 */
	write	AMEVCNTVOFF15_EL2	/* index 5 */
	write	AMEVCNTVOFF16_EL2	/* index 6 */
	write	AMEVCNTVOFF17_EL2	/* index 7 */
	write	AMEVCNTVOFF18_EL2	/* index 8 */
	write	AMEVCNTVOFF19_EL2	/* index 9 */
	write	AMEVCNTVOFF1A_EL2	/* index 10 */
	write	AMEVCNTVOFF1B_EL2	/* index 11 */
	write	AMEVCNTVOFF1C_EL2	/* index 12 */
	write	AMEVCNTVOFF1D_EL2	/* index 13 */
	write	AMEVCNTVOFF1E_EL2	/* index 14 */
	write	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_write_internal
#endif