/*
 * Copyright (c) 2017-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group0_cnt_write_internal
	.globl	amu_group1_cnt_read_internal
	.globl	amu_group1_cnt_write_internal
	.globl	amu_group1_set_evtype_internal

	/* FEAT_AMUv1p1 virtualisation offset register functions */
	.globl	amu_group0_voffset_read_internal
	.globl	amu_group0_voffset_write_internal
	.globl	amu_group1_voffset_read_internal
	.globl	amu_group1_voffset_write_internal

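/*
 * Each accessor below uses the same computed-branch dispatch: a table of
 * fixed-size `read`/`write` entries follows the `br` instruction, and the
 * caller's `idx` is scaled into the address of the entry for the requested
 * system register.  The `read` and `write` helper macros are assumed to be
 * provided by asm_macros.S and to expand to an mrs/msr plus a ret (with a
 * leading "bti j" landing pad when ENABLE_BTI is set), which is why each
 * table entry is 8 bytes, or 12 bytes with BTI enabled.
 *
 * As an illustration only (not assembled as part of any function), with
 * ENABLE_BTI=0 the line "read AMEVCNTR00_EL0" is expected to expand to the
 * 8-byte sequence:
 *
 *	mrs	x0, AMEVCNTR00_EL0
 *	ret
 *
 * so the branch target is computed as 1f + (idx * 8), or 1f + (idx * 12)
 * when ENABLE_BTI=1.
 */
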
/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR00_EL0		/* index 0 */
	read	AMEVCNTR01_EL0		/* index 1 */
	read	AMEVCNTR02_EL0		/* index 2 */
	read	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_read_internal

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group0_cnt_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTR00_EL0		/* index 0 */
	write	AMEVCNTR01_EL0		/* index 1 */
	write	AMEVCNTR02_EL0		/* index 2 */
	write	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_write_internal

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR10_EL0		/* index 0 */
	read	AMEVCNTR11_EL0		/* index 1 */
	read	AMEVCNTR12_EL0		/* index 2 */
	read	AMEVCNTR13_EL0		/* index 3 */
	read	AMEVCNTR14_EL0		/* index 4 */
	read	AMEVCNTR15_EL0		/* index 5 */
	read	AMEVCNTR16_EL0		/* index 6 */
	read	AMEVCNTR17_EL0		/* index 7 */
	read	AMEVCNTR18_EL0		/* index 8 */
	read	AMEVCNTR19_EL0		/* index 9 */
	read	AMEVCNTR1A_EL0		/* index 10 */
	read	AMEVCNTR1B_EL0		/* index 11 */
	read	AMEVCNTR1C_EL0		/* index 12 */
	read	AMEVCNTR1D_EL0		/* index 13 */
	read	AMEVCNTR1E_EL0		/* index 14 */
	read	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group1_cnt_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTR10_EL0		/* index 0 */
	write	AMEVCNTR11_EL0		/* index 1 */
	write	AMEVCNTR12_EL0		/* index 2 */
	write	AMEVCNTR13_EL0		/* index 3 */
	write	AMEVCNTR14_EL0		/* index 4 */
	write	AMEVCNTR15_EL0		/* index 5 */
	write	AMEVCNTR16_EL0		/* index 6 */
	write	AMEVCNTR17_EL0		/* index 7 */
	write	AMEVCNTR18_EL0		/* index 8 */
	write	AMEVCNTR19_EL0		/* index 9 */
	write	AMEVCNTR1A_EL0		/* index 10 */
	write	AMEVCNTR1B_EL0		/* index 11 */
	write	AMEVCNTR1C_EL0		/* index 12 */
	write	AMEVCNTR1D_EL0		/* index 13 */
	write	AMEVCNTR1E_EL0		/* index 14 */
	write	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_set_evtype_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)

	/* `val` must be in the range [0, 65535] */
	tst	x1, #~0xFFFF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVTYPER10_EL0		/* index 0 */
	write	AMEVTYPER11_EL0		/* index 1 */
	write	AMEVTYPER12_EL0		/* index 2 */
	write	AMEVTYPER13_EL0		/* index 3 */
	write	AMEVTYPER14_EL0		/* index 4 */
	write	AMEVTYPER15_EL0		/* index 5 */
	write	AMEVTYPER16_EL0		/* index 6 */
	write	AMEVTYPER17_EL0		/* index 7 */
	write	AMEVTYPER18_EL0		/* index 8 */
	write	AMEVTYPER19_EL0		/* index 9 */
	write	AMEVTYPER1A_EL0		/* index 10 */
	write	AMEVTYPER1B_EL0		/* index 11 */
	write	AMEVTYPER1C_EL0		/* index 12 */
	write	AMEVTYPER1D_EL0		/* index 13 */
	write	AMEVTYPER1E_EL0		/* index 14 */
	write	AMEVTYPER1F_EL0		/* index 15 */
endfunc amu_group1_set_evtype_internal

/*
 * Accessor functions for virtual offset registers added with FEAT_AMUv1p1
 */

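/*
 * Note that AMEVCNTVOFF01_EL2 is not implemented: group 0 counter 1
 * (architecturally, the constant-frequency cycles counter) has no virtual
 * offset.  The group 0 accessors below therefore reject idx == 1 under
 * ENABLE_ASSERTIONS and leave a correspondingly sized gap in their
 * dispatch tables.
 */
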
/*
 * uint64_t amu_group0_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group0_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4
#endif
	read	AMEVCNTVOFF02_EL2	/* index 2 */
	read	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_read_internal

/*
 * void amu_group0_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group0_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4
#endif
	write	AMEVCNTVOFF02_EL2	/* index 2 */
	write	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_write_internal

/*
 * uint64_t amu_group1_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group1_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTVOFF10_EL2	/* index 0 */
	read	AMEVCNTVOFF11_EL2	/* index 1 */
	read	AMEVCNTVOFF12_EL2	/* index 2 */
	read	AMEVCNTVOFF13_EL2	/* index 3 */
	read	AMEVCNTVOFF14_EL2	/* index 4 */
	read	AMEVCNTVOFF15_EL2	/* index 5 */
	read	AMEVCNTVOFF16_EL2	/* index 6 */
	read	AMEVCNTVOFF17_EL2	/* index 7 */
	read	AMEVCNTVOFF18_EL2	/* index 8 */
	read	AMEVCNTVOFF19_EL2	/* index 9 */
	read	AMEVCNTVOFF1A_EL2	/* index 10 */
	read	AMEVCNTVOFF1B_EL2	/* index 11 */
	read	AMEVCNTVOFF1C_EL2	/* index 12 */
	read	AMEVCNTVOFF1D_EL2	/* index 13 */
	read	AMEVCNTVOFF1E_EL2	/* index 14 */
	read	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_read_internal

/*
 * void amu_group1_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group1_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index.  Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF10_EL2	/* index 0 */
	write	AMEVCNTVOFF11_EL2	/* index 1 */
	write	AMEVCNTVOFF12_EL2	/* index 2 */
	write	AMEVCNTVOFF13_EL2	/* index 3 */
	write	AMEVCNTVOFF14_EL2	/* index 4 */
	write	AMEVCNTVOFF15_EL2	/* index 5 */
	write	AMEVCNTVOFF16_EL2	/* index 6 */
	write	AMEVCNTVOFF17_EL2	/* index 7 */
	write	AMEVCNTVOFF18_EL2	/* index 8 */
	write	AMEVCNTVOFF19_EL2	/* index 9 */
	write	AMEVCNTVOFF1A_EL2	/* index 10 */
	write	AMEVCNTVOFF1B_EL2	/* index 11 */
	write	AMEVCNTVOFF1C_EL2	/* index 12 */
	write	AMEVCNTVOFF1D_EL2	/* index 13 */
	write	AMEVCNTVOFF1E_EL2	/* index 14 */
	write	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_write_internal