Lines Matching refs: box

238 static inline bool uncore_mmio_is_valid_offset(struct intel_uncore_box *box,  in uncore_mmio_is_valid_offset()  argument
241 if (offset < box->pmu->type->mmio_map_size) in uncore_mmio_is_valid_offset()
245 offset, box->pmu->type->name); in uncore_mmio_is_valid_offset()
251 unsigned int uncore_mmio_box_ctl(struct intel_uncore_box *box) in uncore_mmio_box_ctl() argument
253 return box->pmu->type->box_ctl + in uncore_mmio_box_ctl()
254 box->pmu->type->mmio_offset * box->pmu->pmu_idx; in uncore_mmio_box_ctl()
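
These fragments appear to come from the Linux kernel's Intel uncore PMU header (arch/x86/events/intel/uncore.h). The two MMIO helpers above bound-check a register offset against the size of the mapped window (mmio_map_size) and place a box's control block at box_ctl plus a per-box stride (mmio_offset) scaled by the box index (pmu_idx). The sketch below is a minimal, self-contained userspace model of that arithmetic only; the fake_* structures and every numeric value in it are hypothetical, not taken from any real uncore unit.

#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel's intel_uncore_* structures. */
struct fake_uncore_type {
	const char   *name;
	unsigned int  box_ctl;        /* control-register base of box 0     */
	unsigned int  mmio_offset;    /* distance between consecutive boxes */
	unsigned int  mmio_map_size;  /* size of the mapped MMIO window     */
};

struct fake_uncore_pmu {
	struct fake_uncore_type *type;
	int                      pmu_idx;  /* which box instance this is */
};

/* Mirrors uncore_mmio_is_valid_offset(): reject offsets past the mapping. */
static bool mmio_is_valid_offset(struct fake_uncore_pmu *pmu, unsigned long offset)
{
	if (offset < pmu->type->mmio_map_size)
		return true;

	fprintf(stderr, "invalid offset 0x%lx for %s\n", offset, pmu->type->name);
	return false;
}

/* Mirrors uncore_mmio_box_ctl(): base + per-box stride * box index. */
static unsigned int mmio_box_ctl(struct fake_uncore_pmu *pmu)
{
	return pmu->type->box_ctl + pmu->type->mmio_offset * pmu->pmu_idx;
}

int main(void)
{
	/* Hypothetical numbers, not taken from any real uncore unit. */
	struct fake_uncore_type type = {
		.name = "demo_imc", .box_ctl = 0x100,
		.mmio_offset = 0x1000, .mmio_map_size = 0x4000,
	};
	struct fake_uncore_pmu pmu2 = { .type = &type, .pmu_idx = 2 };

	/* Where box 2's control block sits: 0x100 + 0x1000 * 2 = 0x2100. */
	printf("box 2 ctl = 0x%x\n", mmio_box_ctl(&pmu2));

	/* Offsets into the mapped window are checked against its size. */
	printf("0x0040 in range? %d\n", mmio_is_valid_offset(&pmu2, 0x40));
	printf("0x5000 in range? %d\n", mmio_is_valid_offset(&pmu2, 0x5000));
	return 0;
}
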
257 static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box) in uncore_pci_box_ctl() argument
259 return box->pmu->type->box_ctl; in uncore_pci_box_ctl()
262 static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box) in uncore_pci_fixed_ctl() argument
264 return box->pmu->type->fixed_ctl; in uncore_pci_fixed_ctl()
267 static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box) in uncore_pci_fixed_ctr() argument
269 return box->pmu->type->fixed_ctr; in uncore_pci_fixed_ctr()
273 unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx) in uncore_pci_event_ctl() argument
275 if (test_bit(UNCORE_BOX_FLAG_CTL_OFFS8, &box->flags)) in uncore_pci_event_ctl()
276 return idx * 8 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
278 return idx * 4 + box->pmu->type->event_ctl; in uncore_pci_event_ctl()
282 unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_pci_perf_ctr() argument
284 return idx * 8 + box->pmu->type->perf_ctr; in uncore_pci_perf_ctr()
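
The PCI helpers above lay the per-counter registers out at fixed strides from the bases stored in the type descriptor: event-select registers are normally 4 bytes apart (8 bytes when the box sets UNCORE_BOX_FLAG_CTL_OFFS8), while the 64-bit counters are always 8 bytes apart. A compact sketch of that layout arithmetic; the DEMO_* bases are made up for illustration.

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical PCI config-space bases for one made-up box type. */
#define DEMO_EVENT_CTL_BASE 0xd8   /* first event-select register */
#define DEMO_PERF_CTR_BASE  0xa0   /* first 64-bit counter        */

/* Mirrors uncore_pci_event_ctl(): 4-byte stride, or 8 with CTL_OFFS8. */
static unsigned pci_event_ctl(int idx, bool ctl_offs8)
{
	return (ctl_offs8 ? idx * 8 : idx * 4) + DEMO_EVENT_CTL_BASE;
}

/* Mirrors uncore_pci_perf_ctr(): counters are always 8 bytes apart. */
static unsigned pci_perf_ctr(int idx)
{
	return idx * 8 + DEMO_PERF_CTR_BASE;
}

int main(void)
{
	for (int idx = 0; idx < 4; idx++)
		printf("ctr %d: ctl @ 0x%x (0x%x w/ OFFS8), data @ 0x%x\n",
		       idx, pci_event_ctl(idx, false),
		       pci_event_ctl(idx, true), pci_perf_ctr(idx));
	return 0;
}
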
287 static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box) in uncore_msr_box_offset() argument
289 struct intel_uncore_pmu *pmu = box->pmu; in uncore_msr_box_offset()
295 static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box) in uncore_msr_box_ctl() argument
297 if (!box->pmu->type->box_ctl) in uncore_msr_box_ctl()
299 return box->pmu->type->box_ctl + uncore_msr_box_offset(box); in uncore_msr_box_ctl()
302 static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box) in uncore_msr_fixed_ctl() argument
304 if (!box->pmu->type->fixed_ctl) in uncore_msr_fixed_ctl()
306 return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box); in uncore_msr_fixed_ctl()
309 static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box) in uncore_msr_fixed_ctr() argument
311 return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box); in uncore_msr_fixed_ctr()
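
The MSR helpers above all add a per-box offset, uncore_msr_box_offset(), to a base MSR taken from the type descriptor, and return 0 when the type defines no box-level or fixed control at all. The body of uncore_msr_box_offset() is elided in this listing; the sketch below simply assumes a constant per-box stride (msr_offset * pmu_idx), while real box types may instead use a per-box offset table. All MSR numbers are hypothetical.

#include <stdio.h>

/* Simplified type descriptor; a real one may carry a per-box offset
 * table instead of a single stride (that detail is elided above). */
struct demo_type {
	unsigned box_ctl;     /* 0 means "this type has no box-level control" */
	unsigned msr_offset;  /* MSR distance between consecutive boxes       */
};

/* Per-box offset, assuming a constant stride per box index. */
static unsigned msr_box_offset(const struct demo_type *t, int pmu_idx)
{
	return t->msr_offset * pmu_idx;
}

/* Mirrors uncore_msr_box_ctl(): 0 when the type defines no box control. */
static unsigned msr_box_ctl(const struct demo_type *t, int pmu_idx)
{
	if (!t->box_ctl)
		return 0;
	return t->box_ctl + msr_box_offset(t, pmu_idx);
}

int main(void)
{
	/* Hypothetical MSR numbers, not real Intel ones. */
	struct demo_type cbox = { .box_ctl = 0x700, .msr_offset = 0x10 };

	for (int idx = 0; idx < 3; idx++)
		printf("box %d: box_ctl MSR 0x%x\n", idx, msr_box_ctl(&cbox, idx));
	return 0;
}
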
356 unsigned int uncore_freerunning_counter(struct intel_uncore_box *box, in uncore_freerunning_counter() argument
361 struct intel_uncore_pmu *pmu = box->pmu; in uncore_freerunning_counter()
371 unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx) in uncore_msr_event_ctl() argument
373 if (test_bit(UNCORE_BOX_FLAG_CFL8_CBOX_MSR_OFFS, &box->flags)) { in uncore_msr_event_ctl()
375 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_event_ctl()
377 return box->pmu->type->event_ctl + in uncore_msr_event_ctl()
378 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_event_ctl()
379 uncore_msr_box_offset(box); in uncore_msr_event_ctl()
384 unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_msr_perf_ctr() argument
386 if (test_bit(UNCORE_BOX_FLAG_CFL8_CBOX_MSR_OFFS, &box->flags)) { in uncore_msr_perf_ctr()
388 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx); in uncore_msr_perf_ctr()
390 return box->pmu->type->perf_ctr + in uncore_msr_perf_ctr()
391 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + in uncore_msr_perf_ctr()
392 uncore_msr_box_offset(box); in uncore_msr_perf_ctr()
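
The MSR event-control and counter helpers above double the index stride when pair_ctr_ctl is set, i.e. when each counter's control and data MSRs are interleaved rather than grouped, and take a separate branch (partly elided in this listing) for boxes flagged UNCORE_BOX_FLAG_CFL8_CBOX_MSR_OFFS. The sketch below models only the common path; all MSR bases are hypothetical.

#include <stdio.h>

/* When control and counter MSRs are interleaved (ctl0, ctr0, ctl1, ctr1, ...),
 * each logical counter index has to advance by two MSRs, hence the 2 * idx. */
static unsigned msr_event_ctl(unsigned event_ctl_base, unsigned box_offset,
			      int idx, int pair_ctr_ctl)
{
	return event_ctl_base + (pair_ctr_ctl ? 2 * idx : idx) + box_offset;
}

static unsigned msr_perf_ctr(unsigned perf_ctr_base, unsigned box_offset,
			     int idx, int pair_ctr_ctl)
{
	return perf_ctr_base + (pair_ctr_ctl ? 2 * idx : idx) + box_offset;
}

int main(void)
{
	/* Hypothetical MSR bases. Unpaired layout: all ctls, then all ctrs.
	 * Paired layout: ctl0, ctr0, ctl1, ctr1, ... so the stride doubles. */
	for (int idx = 0; idx < 3; idx++) {
		printf("unpaired idx %d: ctl 0x%x ctr 0x%x\n", idx,
		       msr_event_ctl(0xe01, 0, idx, 0),
		       msr_perf_ctr(0xe04, 0, idx, 0));
		printf("paired   idx %d: ctl 0x%x ctr 0x%x\n", idx,
		       msr_event_ctl(0xe01, 0, idx, 1),
		       msr_perf_ctr(0xe02, 0, idx, 1));
	}
	return 0;
}
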
397 unsigned uncore_fixed_ctl(struct intel_uncore_box *box) in uncore_fixed_ctl() argument
399 if (box->pci_dev || box->io_addr) in uncore_fixed_ctl()
400 return uncore_pci_fixed_ctl(box); in uncore_fixed_ctl()
402 return uncore_msr_fixed_ctl(box); in uncore_fixed_ctl()
406 unsigned uncore_fixed_ctr(struct intel_uncore_box *box) in uncore_fixed_ctr() argument
408 if (box->pci_dev || box->io_addr) in uncore_fixed_ctr()
409 return uncore_pci_fixed_ctr(box); in uncore_fixed_ctr()
411 return uncore_msr_fixed_ctr(box); in uncore_fixed_ctr()
415 unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx) in uncore_event_ctl() argument
417 if (box->pci_dev || box->io_addr) in uncore_event_ctl()
418 return uncore_pci_event_ctl(box, idx); in uncore_event_ctl()
420 return uncore_msr_event_ctl(box, idx); in uncore_event_ctl()
424 unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx) in uncore_perf_ctr() argument
426 if (box->pci_dev || box->io_addr) in uncore_perf_ctr()
427 return uncore_pci_perf_ctr(box, idx); in uncore_perf_ctr()
429 return uncore_msr_perf_ctr(box, idx); in uncore_perf_ctr()
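
The four generic accessors above pick a backend per box: a box with a pci_dev or an io_addr uses the PCI/MMIO register layout, otherwise the MSR layout. A minimal sketch of that dispatch pattern, with a toy demo_box and hypothetical register bases:

#include <stdio.h>

/* Toy box: a non-NULL pci_dev or io_addr means "not an MSR box",
 * mirroring the box->pci_dev || box->io_addr test above. */
struct demo_box {
	void *pci_dev;   /* set for PCI-attached boxes */
	void *io_addr;   /* set for MMIO-mapped boxes  */
};

static unsigned pci_event_ctl(int idx) { return 0xd8 + idx * 4; }  /* hypothetical */
static unsigned msr_event_ctl(int idx) { return 0xe01 + idx;    }  /* hypothetical */

/* Mirrors uncore_event_ctl(): one generic accessor, two backends. */
static unsigned event_ctl(struct demo_box *box, int idx)
{
	if (box->pci_dev || box->io_addr)
		return pci_event_ctl(idx);

	return msr_event_ctl(idx);
}

int main(void)
{
	struct demo_box msr_box = { 0 };
	struct demo_box pci_box = { .pci_dev = &pci_box };

	printf("MSR box, ctr 1 -> reg 0x%x\n", event_ctl(&msr_box, 1));
	printf("PCI box, ctr 1 -> reg 0x%x\n", event_ctl(&pci_box, 1));
	return 0;
}
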
432 static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box) in uncore_perf_ctr_bits() argument
434 return box->pmu->type->perf_ctr_bits; in uncore_perf_ctr_bits()
437 static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box) in uncore_fixed_ctr_bits() argument
439 return box->pmu->type->fixed_ctr_bits; in uncore_fixed_ctr_bits()
443 unsigned int uncore_freerunning_bits(struct intel_uncore_box *box, in uncore_freerunning_bits() argument
448 return box->pmu->type->freerunning[type].bits; in uncore_freerunning_bits()
451 static inline int uncore_num_freerunning(struct intel_uncore_box *box, in uncore_num_freerunning() argument
456 return box->pmu->type->freerunning[type].num_counters; in uncore_num_freerunning()
459 static inline int uncore_num_freerunning_types(struct intel_uncore_box *box, in uncore_num_freerunning_types() argument
462 return box->pmu->type->num_freerunning_types; in uncore_num_freerunning_types()
465 static inline bool check_valid_freerunning_event(struct intel_uncore_box *box, in check_valid_freerunning_event() argument
471 return (type < uncore_num_freerunning_types(box, event)) && in check_valid_freerunning_event()
472 (idx < uncore_num_freerunning(box, event)); in check_valid_freerunning_event()
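
check_valid_freerunning_event() above accepts a free-running event only if its type is below the number of free-running types the box declares and its index is below that type's counter count. How the type and index are decoded from the event is not shown in this listing, so the sketch below takes them as plain arguments and uses a hypothetical table.

#include <stdbool.h>
#include <stdio.h>

struct demo_freerunning {
	unsigned int num_counters;  /* counters of this free-running type */
	unsigned int bits;          /* counter width in bits              */
};

/* Mirrors check_valid_freerunning_event(): both the type and the index
 * must fall inside the table the box type declares. */
static bool valid_freerunning(const struct demo_freerunning *table,
			      int num_types, unsigned int type, unsigned int idx)
{
	return type < (unsigned int)num_types && idx < table[type].num_counters;
}

int main(void)
{
	/* Hypothetical table: type 0 has 4 counters, type 1 has 1. */
	struct demo_freerunning table[] = { { 4, 48 }, { 1, 48 } };

	printf("%d\n", valid_freerunning(table, 2, 0, 3));  /* 1: in range     */
	printf("%d\n", valid_freerunning(table, 2, 1, 1));  /* 0: idx too big  */
	printf("%d\n", valid_freerunning(table, 2, 2, 0));  /* 0: no such type */
	return 0;
}
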
475 static inline int uncore_num_counters(struct intel_uncore_box *box) in uncore_num_counters() argument
477 return box->pmu->type->num_counters; in uncore_num_counters()
489 static inline int uncore_freerunning_hw_config(struct intel_uncore_box *box, in uncore_freerunning_hw_config() argument
498 static inline void uncore_disable_event(struct intel_uncore_box *box, in uncore_disable_event() argument
501 box->pmu->type->ops->disable_event(box, event); in uncore_disable_event()
504 static inline void uncore_enable_event(struct intel_uncore_box *box, in uncore_enable_event() argument
507 box->pmu->type->ops->enable_event(box, event); in uncore_enable_event()
510 static inline u64 uncore_read_counter(struct intel_uncore_box *box, in uncore_read_counter() argument
513 return box->pmu->type->ops->read_counter(box, event); in uncore_read_counter()
516 static inline void uncore_box_init(struct intel_uncore_box *box) in uncore_box_init() argument
518 if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { in uncore_box_init()
519 if (box->pmu->type->ops->init_box) in uncore_box_init()
520 box->pmu->type->ops->init_box(box); in uncore_box_init()
524 static inline void uncore_box_exit(struct intel_uncore_box *box) in uncore_box_exit() argument
526 if (test_and_clear_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { in uncore_box_exit()
527 if (box->pmu->type->ops->exit_box) in uncore_box_exit()
528 box->pmu->type->ops->exit_box(box); in uncore_box_exit()
532 static inline bool uncore_box_is_fake(struct intel_uncore_box *box) in uncore_box_is_fake() argument
534 return (box->dieid < 0); in uncore_box_is_fake()
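
uncore_box_init() and uncore_box_exit() above guard the type's init_box/exit_box ops with test_and_set_bit()/test_and_clear_bit() on UNCORE_BOX_FLAG_INITIATED, so each op runs at most once per box, and uncore_box_is_fake() marks placeholder boxes by a negative die id. The sketch below reproduces the run-once guard in userspace, with a GCC/Clang atomic builtin standing in for the kernel bitops; the demo_box type and the flag bit are hypothetical.

#include <stdbool.h>
#include <stdio.h>

struct demo_box {
	unsigned long flags;
	int dieid;               /* < 0 marks a fake/validation-only box */
	void (*init_box)(struct demo_box *box);
};

#define DEMO_FLAG_INITIATED 0UL   /* bit number, like UNCORE_BOX_FLAG_INITIATED */

/* Userspace stand-in for test_and_set_bit(): atomically set one bit and
 * report whether it was already set. */
static bool test_and_set_bit_ul(unsigned long bit, unsigned long *addr)
{
	unsigned long mask = 1UL << bit;
	return __atomic_fetch_or(addr, mask, __ATOMIC_SEQ_CST) & mask;
}

/* Mirrors uncore_box_init(): call the op only on the first transition. */
static void box_init(struct demo_box *box)
{
	if (!test_and_set_bit_ul(DEMO_FLAG_INITIATED, &box->flags)) {
		if (box->init_box)
			box->init_box(box);
	}
}

static void demo_init_op(struct demo_box *box) { (void)box; puts("init_box ran"); }

int main(void)
{
	struct demo_box box = { .dieid = 0, .init_box = demo_init_op };

	box_init(&box);   /* prints once        */
	box_init(&box);   /* second call: no-op */
	printf("fake? %d\n", box.dieid < 0);   /* mirrors uncore_box_is_fake() */
	return 0;
}
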
548 u64 uncore_msr_read_counter(struct intel_uncore_box *box, struct perf_event *event);
549 void uncore_mmio_exit_box(struct intel_uncore_box *box);
550 u64 uncore_mmio_read_counter(struct intel_uncore_box *box,
552 void uncore_pmu_start_hrtimer(struct intel_uncore_box *box);
553 void uncore_pmu_cancel_hrtimer(struct intel_uncore_box *box);
559 void uncore_perf_event_update(struct intel_uncore_box *box, struct perf_event *event);
561 uncore_get_constraint(struct intel_uncore_box *box, struct perf_event *event);
562 void uncore_put_constraint(struct intel_uncore_box *box, struct perf_event *event);
563 u64 uncore_shared_reg_config(struct intel_uncore_box *box, int idx);
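
The prototypes above include uncore_perf_event_update() alongside the uncore_perf_ctr_bits()/uncore_fixed_ctr_bits() getters. Counters narrower than 64 bits are normally accumulated with the standard perf wrap-safe delta: shift both samples up to bit 63 before subtracting, then shift the delta back down. A small, self-contained sketch of that pattern; the 48-bit width is just an example, not a claim about any particular box.

#include <stdint.h>
#include <stdio.h>

/* Wrap-safe accumulation for an N-bit free-running hardware counter,
 * the usual perf pattern behind an "event update" helper: shifting both
 * samples up to bit 63 makes the subtraction wrap correctly, and the
 * final shift brings the delta back down to counter units. */
static uint64_t counter_delta(uint64_t prev, uint64_t now, int bits)
{
	int shift = 64 - bits;
	return ((now << shift) - (prev << shift)) >> shift;
}

int main(void)
{
	int bits = 48;
	uint64_t max = (1ULL << bits) - 1;

	/* Counter wrapped from near its top back to a small value. */
	printf("delta = %llu\n",
	       (unsigned long long)counter_delta(max - 5, 10, bits));  /* 16 */
	return 0;
}
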