/*
 * Copyright (c) 2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group0_cnt_write_internal
	.globl	amu_group1_cnt_read_internal
	.globl	amu_group1_cnt_write_internal
	.globl	amu_group1_set_evtype_internal

/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `r0` and `r1` (low and high words of the
 * 64-bit result, per the AAPCS32 64-bit return convention).
 * `r1` is also used as a scratch register before the read.
 */
func amu_group0_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * `idx` should be between [0, 3]: shifting right by 2 must
	 * leave zero, i.e. no bits above bit 1 may be set.
	 */
	mov	r1, r0
	lsr	r1, r1, #2
	cmp	r1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx` calculate address of ldcopr16/bx lr instruction pair
	 * in the table below, then branch into the table. The computed
	 * branch relies on every entry being exactly 8 bytes, so the
	 * entries below must not be reordered or resized.
	 */
	adr	r1, 1f
	lsl	r0, r0, #3	/* each ldcopr16/bx lr sequence is 8 bytes */
	add	r1, r1, r0
	bx	r1
1:
	ldcopr16	r0, r1, AMEVCNTR00	/* index 0 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR01	/* index 1 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR02	/* index 2 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR03	/* index 3 */
	bx		lr
endfunc amu_group0_cnt_read_internal

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 * `idx` is passed in `r0` and `val` is passed in `r2` and `r3`
 * (low and high words; `r1` is skipped because a 64-bit argument
 * is passed in an even/odd register pair under AAPCS32).
 * `r1` is used as a scratch register.
 */
func amu_group0_cnt_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * `idx` should be between [0, 3]: shifting right by 2 must
	 * leave zero, i.e. no bits above bit 1 may be set.
	 */
	mov	r1, r0
	lsr	r1, r1, #2
	cmp	r1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx` calculate address of stcopr16/bx lr instruction pair
	 * in the table below, then branch into the table. The computed
	 * branch relies on every entry being exactly 8 bytes, so the
	 * entries below must not be reordered or resized.
	 */
	adr	r1, 1f
	lsl	r0, r0, #3	/* each stcopr16/bx lr sequence is 8 bytes */
	add	r1, r1, r0
	bx	r1

1:
	stcopr16	r2, r3, AMEVCNTR00	/* index 0 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR01	/* index 1 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR02	/* index 2 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR03	/* index 3 */
	bx		lr
endfunc amu_group0_cnt_write_internal

#if ENABLE_AMU_AUXILIARY_COUNTERS
/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `r0` and `r1` (low and high words of the
 * 64-bit result). `r1` is also used as a scratch register
 * before the read.
 */
func amu_group1_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * `idx` should be between [0, 15]: shifting right by 4 must
	 * leave zero, i.e. no bits above bit 3 may be set.
	 */
	mov	r1, r0
	lsr	r1, r1, #4
	cmp	r1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx` calculate address of ldcopr16/bx lr instruction pair
	 * in the table below, then branch into the table. The computed
	 * branch relies on every entry being exactly 8 bytes, so the
	 * entries below must not be reordered or resized.
	 */
	adr	r1, 1f
	lsl	r0, r0, #3	/* each ldcopr16/bx lr sequence is 8 bytes */
	add	r1, r1, r0
	bx	r1

1:
	ldcopr16	r0, r1, AMEVCNTR10	/* index 0 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR11	/* index 1 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR12	/* index 2 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR13	/* index 3 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR14	/* index 4 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR15	/* index 5 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR16	/* index 6 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR17	/* index 7 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR18	/* index 8 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR19	/* index 9 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1A	/* index 10 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1B	/* index 11 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1C	/* index 12 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1D	/* index 13 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1E	/* index 14 */
	bx		lr
	ldcopr16	r0, r1, AMEVCNTR1F	/* index 15 */
	bx		lr
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 * `idx` is passed in `r0` and `val` is passed in `r2` and `r3`
 * (low and high words; `r1` is skipped because a 64-bit argument
 * is passed in an even/odd register pair under AAPCS32).
 * `r1` is used as a scratch register.
 */
func amu_group1_cnt_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * `idx` should be between [0, 15]: shifting right by 4 must
	 * leave zero, i.e. no bits above bit 3 may be set.
	 */
	mov	r1, r0
	lsr	r1, r1, #4
	cmp	r1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx` calculate address of stcopr16/bx lr instruction pair
	 * in the table below, then branch into the table. The computed
	 * branch relies on every entry being exactly 8 bytes, so the
	 * entries below must not be reordered or resized.
	 */
	adr	r1, 1f
	lsl	r0, r0, #3	/* each stcopr16/bx lr sequence is 8 bytes */
	add	r1, r1, r0
	bx	r1

1:
	stcopr16	r2, r3, AMEVCNTR10	/* index 0 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR11	/* index 1 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR12	/* index 2 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR13	/* index 3 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR14	/* index 4 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR15	/* index 5 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR16	/* index 6 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR17	/* index 7 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR18	/* index 8 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR19	/* index 9 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1A	/* index 10 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1B	/* index 11 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1C	/* index 12 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1D	/* index 13 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1E	/* index 14 */
	bx		lr
	stcopr16	r2, r3, AMEVCNTR1F	/* index 15 */
	bx		lr
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 * `idx` is passed in `r0` and `val` in `r1`; `r2` is used as a
 * scratch register.
 */
func amu_group1_set_evtype_internal
#if ENABLE_ASSERTIONS
	/*
	 * `idx` should be between [0, 15]: shifting right by 4 must
	 * leave zero, i.e. no bits above bit 3 may be set.
	 */
	mov	r2, r0
	lsr	r2, r2, #4
	cmp	r2, #0
	ASM_ASSERT(eq)

	/*
	 * val should be between [0, 65535]: the event type field is
	 * 16 bits wide, so no bits above bit 15 may be set.
	 */
	mov	r2, r1
	lsr	r2, r2, #16
	cmp	r2, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx` calculate address of stcopr/bx lr instruction pair
	 * in the table below, then branch into the table. The computed
	 * branch relies on every entry being exactly 8 bytes, so the
	 * entries below must not be reordered or resized.
	 */
	adr	r2, 1f
	lsl	r0, r0, #3	/* each stcopr/bx lr sequence is 8 bytes */
	add	r2, r2, r0
	bx	r2

1:
	stcopr	r1, AMEVTYPER10	/* index 0 */
	bx	lr
	stcopr	r1, AMEVTYPER11	/* index 1 */
	bx	lr
	stcopr	r1, AMEVTYPER12	/* index 2 */
	bx	lr
	stcopr	r1, AMEVTYPER13	/* index 3 */
	bx	lr
	stcopr	r1, AMEVTYPER14	/* index 4 */
	bx	lr
	stcopr	r1, AMEVTYPER15	/* index 5 */
	bx	lr
	stcopr	r1, AMEVTYPER16	/* index 6 */
	bx	lr
	stcopr	r1, AMEVTYPER17	/* index 7 */
	bx	lr
	stcopr	r1, AMEVTYPER18	/* index 8 */
	bx	lr
	stcopr	r1, AMEVTYPER19	/* index 9 */
	bx	lr
	stcopr	r1, AMEVTYPER1A	/* index 10 */
	bx	lr
	stcopr	r1, AMEVTYPER1B	/* index 11 */
	bx	lr
	stcopr	r1, AMEVTYPER1C	/* index 12 */
	bx	lr
	stcopr	r1, AMEVTYPER1D	/* index 13 */
	bx	lr
	stcopr	r1, AMEVTYPER1E	/* index 14 */
	bx	lr
	stcopr	r1, AMEVTYPER1F	/* index 15 */
	bx	lr
endfunc amu_group1_set_evtype_internal
#endif /* ENABLE_AMU_AUXILIARY_COUNTERS */