Lines matching refs:msr, i.e. every line that references the identifier msr:

In is_amd_pmu_msr() (msr is an argument):

  132  static inline bool is_amd_pmu_msr(unsigned int msr)
  134          if ((msr >= MSR_F15H_PERF_CTL &&
  135               msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
  136              (msr >= MSR_K7_EVNTSEL0 &&
  137               msr < MSR_K7_PERFCTR0 + amd_num_counters))
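The search shows only the lines that reference msr; the elided lines here are just the function's opening brace and return paths. Filling those in by assumption gives a minimal reconstruction of the whole predicate:

        static inline bool is_amd_pmu_msr(unsigned int msr)
        {
                /* Two candidate ranges: the Fam15h PERF_CTL/PERF_CTR pairs
                 * (two MSRs per counter, hence amd_num_counters * 2), and
                 * the legacy K7 bank, which runs from the first event-select
                 * MSR through the last counter MSR. */
                if ((msr >= MSR_F15H_PERF_CTL &&
                     msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
                    (msr >= MSR_K7_EVNTSEL0 &&
                     msr < MSR_K7_PERFCTR0 + amd_num_counters))
                        return true;

                return false;
        }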
In xen_intel_pmu_emulate() (msr is an argument):

  189  static bool xen_intel_pmu_emulate(unsigned int msr, u64 *val, int type,
  205          switch (msr) {
  243          if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
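Only three of this function's lines reference msr, so most of the body is hidden. The visible lines fit the usual shape of emulated PMU MSR access: a switch maps msr to a slot in the guest-visible PMU context, and the write side special-cases MSR_CORE_PERF_GLOBAL_OVF_CTRL, since writing that register acknowledges (clears) the matching overflow bits in the global status register. A schematic sketch of that shape, with the context structure and field names assumed rather than taken from the file:

        /* Schematic only: ctxt, global_ovf_ctrl and global_status stand in
         * for whatever the real shared PMU context provides. */
        uint64_t *reg = NULL;

        switch (msr) {
        case MSR_CORE_PERF_GLOBAL_OVF_CTRL:
                reg = &ctxt->global_ovf_ctrl;
                break;
        /* ... one case per emulated Intel PMU MSR ... */
        }

        if (reg) {
                if (is_read)
                        *val = *reg;
                else {
                        *reg = *val;
                        /* A write to OVF_CTRL clears the acknowledged bits. */
                        if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
                                ctxt->global_status &= ~(*val);
                }
                return true;    /* handled by emulation */
        }
        return false;           /* caller falls back to the real MSR */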
In xen_amd_pmu_emulate() (msr is an argument):

  252  static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read)
  265              ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3)))
  266                  msr = get_fam15h_addr(msr);
  270          if (msr == amd_ctrls_base + off) {
  274          } else if (msr == amd_counters_base + off) {
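Lines 265-266 normalize legacy K7 MSR numbers to their Fam15h equivalents before the base-plus-offset comparisons, so the matching at lines 270 and 274 only ever deals with one address scheme. get_fam15h_addr() itself is not shown by the search; a sketch of what such a remapping plausibly looks like (the exact implementation, including the offset arithmetic, is an assumption):

        static inline uint32_t get_fam15h_addr(uint32_t addr)
        {
                /* Legacy K7 event-select and counter MSRs form contiguous
                 * banks; Fam15h interleaves CTL/CTR pairs, so each legacy
                 * index maps to base + 2 * index. (Sketch; the real function
                 * may differ.) */
                if (addr >= MSR_K7_PERFCTR0 && addr <= MSR_K7_PERFCTR3)
                        return MSR_F15H_PERF_CTR + 2 * (addr - MSR_K7_PERFCTR0);
                if (addr >= MSR_K7_EVNTSEL0 && addr <= MSR_K7_EVNTSEL3)
                        return MSR_F15H_PERF_CTL + 2 * (addr - MSR_K7_EVNTSEL0);
                return addr;
        }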
In pmu_msr_read() (msr is an argument):

  293  bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
  296          if (is_amd_pmu_msr(msr)) {
  297                  if (!xen_amd_pmu_emulate(msr, val, 1))
  298                          *val = native_read_msr_safe(msr, err);
  304          if (is_intel_pmu_msr(msr, &type, &index)) {
  305                  if (!xen_intel_pmu_emulate(msr, val, type, index, 1))
  306                          *val = native_read_msr_safe(msr, err);
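Both branches follow the same two-step pattern: ask the vendor-specific emulator first, and only read the real hardware MSR (via native_read_msr_safe(), which reports a faulting access through *err instead of crashing) when emulation declines. With the elided lines filled in by assumption, the control flow is roughly:

        bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
        {
                int type, index;

                if (is_amd_pmu_msr(msr)) {
                        /* 1 == is_read; fall back to hardware if the MSR
                         * is not emulated by Xen right now */
                        if (!xen_amd_pmu_emulate(msr, val, 1))
                                *val = native_read_msr_safe(msr, err);
                        return true;    /* this was a PMU MSR */
                }
                if (is_intel_pmu_msr(msr, &type, &index)) {
                        if (!xen_intel_pmu_emulate(msr, val, type, index, 1))
                                *val = native_read_msr_safe(msr, err);
                        return true;
                }
                return false;           /* not a PMU MSR; caller handles it */
        }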
In pmu_msr_write() (msr is an argument):

  314  bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
  319          if (is_amd_pmu_msr(msr)) {
  320                  if (!xen_amd_pmu_emulate(msr, &val, 0))
  321                          *err = native_write_msr_safe(msr, low, high);
  327          if (is_intel_pmu_msr(msr, &type, &index)) {
  328                  if (!xen_intel_pmu_emulate(msr, &val, type, index, 0))
  329                          *err = native_write_msr_safe(msr, low, high);
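The write path mirrors the read path, with is_read == 0. Note that the emulators take a single u64 while the MSR write API splits the value into low/high halves, so one of the elided lines presumably assembles val before the vendor checks, along the lines of:

        /* Assumed from context: pack the 32-bit halves into the u64 that
         * the emulation helpers expect. */
        uint64_t val = ((uint64_t)high << 32) | low;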
In xen_amd_read_pmc() (msr is a local):

  345          uint32_t msr;
  348          msr = amd_counters_base + (counter * amd_msr_step);
  349          return native_read_msr_safe(msr, &err);
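The counter-to-MSR arithmetic at line 348 works for both AMD counter layouts only because amd_counters_base and amd_msr_step are configured as a pair during PMU setup, which this search does not show. The assumed configuration:

        /* Assumed setup (performed at PMU init, not shown by the search):
         *
         * Legacy K7 layout: counter MSRs are contiguous, so
         *         amd_counters_base = MSR_K7_PERFCTR0;   amd_msr_step = 1;
         *
         * Fam15h layout: CTL/CTR pairs interleave, so counters occupy
         * every second MSR:
         *         amd_counters_base = MSR_F15H_PERF_CTR; amd_msr_step = 2;
         *
         * Either way, counter n lives at base + n * step, which is exactly
         * what line 348 computes. */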
In xen_intel_read_pmc() (msr is a local):

  366          uint32_t msr;
  370                  msr = MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
  372                  msr = MSR_IA32_PERFCTR0 + counter;
  374          return native_read_msr_safe(msr, &err);
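Lines 370 and 372 are the two arms of a branch the search elides: the rdpmc convention on Intel encodes fixed-function counters by setting a type bit (bit 30) in the counter index, so the function routes to either the fixed or the general-purpose counter bank. A reconstruction with the elided branch filled in by assumption:

        static unsigned long long xen_intel_read_pmc(int counter)
        {
                int err;
                uint32_t msr;

                /* Bit 30 of the rdpmc index selects the fixed-function
                 * bank; the low bits select the counter within the bank. */
                if (counter & (1 << 30))
                        msr = MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
                else
                        msr = MSR_IA32_PERFCTR0 + counter;

                return native_read_msr_safe(msr, &err);
        }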