Lines matching references to xd (a struct xive_irq_data pointer) in the XIVE interrupt controller code

207 static notrace u8 xive_esb_read(struct xive_irq_data *xd, u32 offset)  in xive_esb_read()  argument
211 if (offset == XIVE_ESB_SET_PQ_10 && xd->flags & XIVE_IRQ_FLAG_STORE_EOI) in xive_esb_read()
214 if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw) in xive_esb_read()
215 val = xive_ops->esb_rw(xd->hw_irq, offset, 0, 0); in xive_esb_read()
217 val = in_be64(xd->eoi_mmio + offset); in xive_esb_read()
222 static void xive_esb_write(struct xive_irq_data *xd, u32 offset, u64 data) in xive_esb_write() argument
224 if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw) in xive_esb_write()
225 xive_ops->esb_rw(xd->hw_irq, offset, data, 1); in xive_esb_write()
227 out_be64(xd->eoi_mmio + offset, data); in xive_esb_write()
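
Taken together, the fragments above give the two ESB accessors the following shape. This is a sketch assembled from the listed lines, assuming the usual XIVE definitions (struct xive_irq_data, xive_ops, the XIVE_ESB_* constants) are in scope; the offset adjustment in the store-EOI case is not shown in the listing and is taken from my reading of the upstream source.

static notrace u8 xive_esb_read(struct xive_irq_data *xd, u32 offset)
{
	u64 val;

	/* Assumption (not in the listing): the store-EOI variant of the
	 * PQ=10 access needs the load/store ordering offset bit. */
	if (offset == XIVE_ESB_SET_PQ_10 && xd->flags & XIVE_IRQ_FLAG_STORE_EOI)
		offset |= XIVE_ESB_LD_ST_MO;

	/* Sources whose ESB pages are only reachable through firmware go
	 * via the backend hook; everything else is a plain MMIO load. */
	if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw)
		val = xive_ops->esb_rw(xd->hw_irq, offset, 0, 0);
	else
		val = in_be64(xd->eoi_mmio + offset);

	return (u8)val;
}

static void xive_esb_write(struct xive_irq_data *xd, u32 offset, u64 data)
{
	if ((xd->flags & XIVE_IRQ_FLAG_H_INT_ESB) && xive_ops->esb_rw)
		xive_ops->esb_rw(xd->hw_irq, offset, data, 1);
	else
		out_be64(xd->eoi_mmio + offset, data);
}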
294 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xmon_xive_get_irq_config() local
295 u64 val = xive_esb_read(xd, XIVE_ESB_GET); in xmon_xive_get_irq_config()
298 xd->flags & XIVE_IRQ_FLAG_STORE_EOI ? 'S' : ' ', in xmon_xive_get_irq_config()
299 xd->flags & XIVE_IRQ_FLAG_LSI ? 'L' : ' ', in xmon_xive_get_irq_config()
300 xd->flags & XIVE_IRQ_FLAG_H_INT_ESB ? 'H' : ' ', in xmon_xive_get_irq_config()
381 static void xive_do_source_eoi(struct xive_irq_data *xd) in xive_do_source_eoi() argument
385 xd->stale_p = false; in xive_do_source_eoi()
388 if (xd->flags & XIVE_IRQ_FLAG_STORE_EOI) { in xive_do_source_eoi()
389 xive_esb_write(xd, XIVE_ESB_STORE_EOI, 0); in xive_do_source_eoi()
398 if (xd->flags & XIVE_IRQ_FLAG_LSI) { in xive_do_source_eoi()
399 xive_esb_read(xd, XIVE_ESB_LOAD_EOI); in xive_do_source_eoi()
409 eoi_val = xive_esb_read(xd, XIVE_ESB_SET_PQ_00); in xive_do_source_eoi()
413 if ((eoi_val & XIVE_ESB_VAL_Q) && xd->trig_mmio) in xive_do_source_eoi()
414 out_be64(xd->trig_mmio, 0); in xive_do_source_eoi()
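
The EOI fragments above fit this control flow: a single store-EOI when the source supports it, the special LOAD_EOI cycle for LSIs, otherwise clear P and Q and retrigger by hand if Q was set. A sketch assembled from those lines; the comments are mine, not from the listing.

static void xive_do_source_eoi(struct xive_irq_data *xd)
{
	u8 eoi_val;

	xd->stale_p = false;

	/* Preferred path: one store to the ESB "store EOI" offset. */
	if (xd->flags & XIVE_IRQ_FLAG_STORE_EOI) {
		xive_esb_write(xd, XIVE_ESB_STORE_EOI, 0);
		return;
	}

	/* LSIs use the EOI load cycle; hardware re-triggers them while the
	 * level is still asserted, so no PQ manipulation is needed. */
	if (xd->flags & XIVE_IRQ_FLAG_LSI) {
		xive_esb_read(xd, XIVE_ESB_LOAD_EOI);
		return;
	}

	/* MSIs: clear P and Q, then retrigger manually if Q was set, i.e.
	 * another occurrence arrived while this one was pending. */
	eoi_val = xive_esb_read(xd, XIVE_ESB_SET_PQ_00);
	if ((eoi_val & XIVE_ESB_VAL_Q) && xd->trig_mmio)
		out_be64(xd->trig_mmio, 0);
}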
420 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_eoi() local
431 !(xd->flags & XIVE_IRQ_FLAG_NO_EOI)) in xive_irq_eoi()
432 xive_do_source_eoi(xd); in xive_irq_eoi()
434 xd->stale_p = true; in xive_irq_eoi()
440 xd->saved_p = false; in xive_irq_eoi()
449 static void xive_do_source_set_mask(struct xive_irq_data *xd, in xive_do_source_set_mask() argument
463 val = xive_esb_read(xd, XIVE_ESB_SET_PQ_01); in xive_do_source_set_mask()
464 if (!xd->stale_p && !!(val & XIVE_ESB_VAL_P)) in xive_do_source_set_mask()
465 xd->saved_p = true; in xive_do_source_set_mask()
466 xd->stale_p = false; in xive_do_source_set_mask()
467 } else if (xd->saved_p) { in xive_do_source_set_mask()
468 xive_esb_read(xd, XIVE_ESB_SET_PQ_10); in xive_do_source_set_mask()
469 xd->saved_p = false; in xive_do_source_set_mask()
471 xive_esb_read(xd, XIVE_ESB_SET_PQ_00); in xive_do_source_set_mask()
472 xd->stale_p = false; in xive_do_source_set_mask()
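
The masking helper reads as a small state machine over the PQ bits plus the saved_p/stale_p software flags: masking sets PQ=01 and remembers whether P was set, while unmasking restores PQ=10 when a P was saved (so a queued occurrence is not lost) and PQ=00 otherwise. A sketch assembled from the fragments above:

static void xive_do_source_set_mask(struct xive_irq_data *xd, bool mask)
{
	u64 val;

	if (mask) {
		/* PQ=01: masked. Remember a pending P so the unmask path
		 * does not re-enable the source while an occurrence is
		 * still sitting in an event queue. */
		val = xive_esb_read(xd, XIVE_ESB_SET_PQ_01);
		if (!xd->stale_p && !!(val & XIVE_ESB_VAL_P))
			xd->saved_p = true;
		xd->stale_p = false;
	} else if (xd->saved_p) {
		/* PQ=10: unmask but keep P set until the queued entry has
		 * been fetched and EOId. */
		xive_esb_read(xd, XIVE_ESB_SET_PQ_10);
		xd->saved_p = false;
	} else {
		/* PQ=00: fully re-enable the source. */
		xive_esb_read(xd, XIVE_ESB_SET_PQ_00);
		xd->stale_p = false;
	}
}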
576 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_pick_irq_target() local
584 if (xd->src_chip != XIVE_INVALID_CHIP_ID && in xive_pick_irq_target()
589 if (xc->chip_id == xd->src_chip) in xive_pick_irq_target()
609 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_startup() local
613 xd->saved_p = false; in xive_irq_startup()
614 xd->stale_p = false; in xive_irq_startup()
633 xd->target = target; in xive_irq_startup()
646 xive_do_source_set_mask(xd, false); in xive_irq_startup()
654 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_shutdown() local
660 if (WARN_ON(xd->target == XIVE_INVALID_TARGET)) in xive_irq_shutdown()
664 xive_do_source_set_mask(xd, true); in xive_irq_shutdown()
671 get_hard_smp_processor_id(xd->target), in xive_irq_shutdown()
674 xive_dec_target_count(xd->target); in xive_irq_shutdown()
675 xd->target = XIVE_INVALID_TARGET; in xive_irq_shutdown()
680 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_unmask() local
682 pr_devel("xive_irq_unmask: irq %d data @%p\n", d->irq, xd); in xive_irq_unmask()
684 xive_do_source_set_mask(xd, false); in xive_irq_unmask()
689 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_mask() local
691 pr_devel("xive_irq_mask: irq %d data @%p\n", d->irq, xd); in xive_irq_mask()
693 xive_do_source_set_mask(xd, true); in xive_irq_mask()
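
The irq_chip mask/unmask callbacks are thin wrappers: they fetch xd from the irq_data and delegate to the helper above. A sketch:

static void xive_irq_unmask(struct irq_data *d)
{
	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);

	pr_devel("xive_irq_unmask: irq %d data @%p\n", d->irq, xd);

	xive_do_source_set_mask(xd, false);
}

static void xive_irq_mask(struct irq_data *d)
{
	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);

	pr_devel("xive_irq_mask: irq %d data @%p\n", d->irq, xd);

	xive_do_source_set_mask(xd, true);
}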
700 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_set_affinity() local
715 if (xd->target != XIVE_INVALID_TARGET && in xive_irq_set_affinity()
716 cpu_online(xd->target) && in xive_irq_set_affinity()
717 cpumask_test_cpu(xd->target, cpumask)) in xive_irq_set_affinity()
731 old_target = xd->target; in xive_irq_set_affinity()
747 xd->target = target; in xive_irq_set_affinity()
758 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_set_type() local
785 !!(xd->flags & XIVE_IRQ_FLAG_LSI)) { in xive_irq_set_type()
789 (xd->flags & XIVE_IRQ_FLAG_LSI) ? "Level" : "Edge"); in xive_irq_set_type()
797 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_retrigger() local
800 if (WARN_ON(xd->flags & XIVE_IRQ_FLAG_LSI)) in xive_irq_retrigger()
807 xive_esb_read(xd, XIVE_ESB_SET_PQ_11); in xive_irq_retrigger()
808 xive_do_source_eoi(xd); in xive_irq_retrigger()
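
Retriggering an MSI is done purely through the ESB: set PQ=11 so the source looks "pending and queued", then EOI it, which (per the EOI path above) clears PQ and writes the trigger page when it sees Q set. A sketch assembled from the fragments; the return values follow the usual irq_chip retrigger convention and are an assumption.

static int xive_irq_retrigger(struct irq_data *d)
{
	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);

	/* Retriggering via PQ only makes sense for MSIs. */
	if (WARN_ON(xd->flags & XIVE_IRQ_FLAG_LSI))
		return 0;

	/* PQ=11, then EOI: the EOI path sees Q set and re-sends the
	 * interrupt through the trigger page. */
	xive_esb_read(xd, XIVE_ESB_SET_PQ_11);
	xive_do_source_eoi(xd);

	return 1;
}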
819 struct xive_irq_data *xd = irq_data_get_irq_handler_data(d); in xive_irq_set_vcpu_affinity() local
832 pq = xive_esb_read(xd, XIVE_ESB_SET_PQ_10); in xive_irq_set_vcpu_affinity()
833 if (!xd->stale_p) { in xive_irq_set_vcpu_affinity()
834 xd->saved_p = !!(pq & XIVE_ESB_VAL_P); in xive_irq_set_vcpu_affinity()
835 xd->stale_p = !xd->saved_p; in xive_irq_set_vcpu_affinity()
839 if (xd->target == XIVE_INVALID_TARGET) { in xive_irq_set_vcpu_affinity()
844 WARN_ON(xd->saved_p); in xive_irq_set_vcpu_affinity()
864 if (xd->saved_p) { in xive_irq_set_vcpu_affinity()
865 xive_esb_read(xd, XIVE_ESB_SET_PQ_11); in xive_irq_set_vcpu_affinity()
883 if (xd->target == XIVE_INVALID_TARGET) { in xive_irq_set_vcpu_affinity()
884 xive_do_source_set_mask(xd, true); in xive_irq_set_vcpu_affinity()
905 get_hard_smp_processor_id(xd->target), in xive_irq_set_vcpu_affinity()
922 if (!xd->saved_p) in xive_irq_set_vcpu_affinity()
923 xive_do_source_eoi(xd); in xive_irq_set_vcpu_affinity()
933 struct xive_irq_data *xd = irq_data_get_irq_handler_data(data); in xive_get_irqchip_state() local
938 pq = xive_esb_read(xd, XIVE_ESB_GET); in xive_get_irqchip_state()
947 *state = (pq != XIVE_ESB_INVALID) && !xd->stale_p && in xive_get_irqchip_state()
948 (xd->saved_p || (!!(pq & XIVE_ESB_VAL_P) && in xive_get_irqchip_state()
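
The xive_get_irqchip_state() fragments compute the "active" state from the current PQ value plus the software flags: an all-ones read (XIVE_ESB_INVALID) means the ESB could not be read and the interrupt is reported inactive; otherwise it is active if it is not stale and either a P was saved at mask time or P is currently set. A sketch; the switch on the requested state and the trailing !irqd_irq_disabled() term are not in the listing and are assumptions from my reading of the upstream source.

static int xive_get_irqchip_state(struct irq_data *data,
				  enum irqchip_irq_state which, bool *state)
{
	struct xive_irq_data *xd = irq_data_get_irq_handler_data(data);
	u8 pq;

	switch (which) {			/* assumed structure */
	case IRQCHIP_STATE_ACTIVE:
		pq = xive_esb_read(xd, XIVE_ESB_GET);

		/* An all-ones read means the ESB page could not be
		 * accessed (e.g. the device is in an error state), so
		 * report the interrupt as inactive. */
		*state = (pq != XIVE_ESB_INVALID) && !xd->stale_p &&
			 (xd->saved_p || (!!(pq & XIVE_ESB_VAL_P) &&
			  !irqd_irq_disabled(data)));	/* last term assumed */
		return 0;
	default:
		return -EINVAL;
	}
}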
976 void xive_cleanup_irq_data(struct xive_irq_data *xd) in xive_cleanup_irq_data() argument
978 pr_debug("%s for HW %x\n", __func__, xd->hw_irq); in xive_cleanup_irq_data()
980 if (xd->eoi_mmio) { in xive_cleanup_irq_data()
981 iounmap(xd->eoi_mmio); in xive_cleanup_irq_data()
982 if (xd->eoi_mmio == xd->trig_mmio) in xive_cleanup_irq_data()
983 xd->trig_mmio = NULL; in xive_cleanup_irq_data()
984 xd->eoi_mmio = NULL; in xive_cleanup_irq_data()
986 if (xd->trig_mmio) { in xive_cleanup_irq_data()
987 iounmap(xd->trig_mmio); in xive_cleanup_irq_data()
988 xd->trig_mmio = NULL; in xive_cleanup_irq_data()
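
The cleanup fragments unmap the ESB pages; the only subtlety is that the EOI and trigger pages may share one mapping, in which case the trigger pointer is cleared instead of being unmapped twice. A sketch assembled from those lines:

void xive_cleanup_irq_data(struct xive_irq_data *xd)
{
	pr_debug("%s for HW %x\n", __func__, xd->hw_irq);

	if (xd->eoi_mmio) {
		iounmap(xd->eoi_mmio);
		/* EOI and trigger may be the same mapping; drop the alias
		 * rather than calling iounmap() on it again below. */
		if (xd->eoi_mmio == xd->trig_mmio)
			xd->trig_mmio = NULL;
		xd->eoi_mmio = NULL;
	}
	if (xd->trig_mmio) {
		iounmap(xd->trig_mmio);
		xd->trig_mmio = NULL;
	}
}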
995 struct xive_irq_data *xd; in xive_irq_alloc_data() local
998 xd = kzalloc(sizeof(struct xive_irq_data), GFP_KERNEL); in xive_irq_alloc_data()
999 if (!xd) in xive_irq_alloc_data()
1001 rc = xive_ops->populate_irq_data(hw, xd); in xive_irq_alloc_data()
1003 kfree(xd); in xive_irq_alloc_data()
1006 xd->target = XIVE_INVALID_TARGET; in xive_irq_alloc_data()
1007 irq_set_handler_data(virq, xd); in xive_irq_alloc_data()
1016 xive_esb_read(xd, XIVE_ESB_SET_PQ_01); in xive_irq_alloc_data()
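
Allocation is the mirror image: kzalloc the per-IRQ data, let the backend populate it, attach it as handler data, and immediately set PQ=01 so the freshly mapped source starts out masked (the read also faults in the ESB mapping). A sketch; the exact signature and error codes are assumptions inferred from the surrounding fragments.

static int xive_irq_alloc_data(unsigned int virq, irq_hw_number_t hw)
{
	struct xive_irq_data *xd;
	int rc;

	xd = kzalloc(sizeof(struct xive_irq_data), GFP_KERNEL);
	if (!xd)
		return -ENOMEM;
	rc = xive_ops->populate_irq_data(hw, xd);
	if (rc) {
		kfree(xd);
		return rc;
	}
	xd->target = XIVE_INVALID_TARGET;
	irq_set_handler_data(virq, xd);

	/* Start masked (PQ=01); the read also maps the ESB page into the
	 * kernel address space as a side effect. */
	xive_esb_read(xd, XIVE_ESB_SET_PQ_01);

	return 0;
}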
1023 struct xive_irq_data *xd = irq_get_handler_data(virq); in xive_irq_free_data() local
1025 if (!xd) in xive_irq_free_data()
1028 xive_cleanup_irq_data(xd); in xive_irq_free_data()
1029 kfree(xd); in xive_irq_free_data()
1038 struct xive_irq_data *xd; in xive_cause_ipi() local
1045 xd = &xc->ipi_data; in xive_cause_ipi()
1046 if (WARN_ON(!xd->trig_mmio)) in xive_cause_ipi()
1048 out_be64(xd->trig_mmio, 0); in xive_cause_ipi()
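
Sending an IPI is a single store to the trigger page of the per-CPU IPI source. A sketch assuming the per-CPU variable is named xive_cpu, as in the upstream file; the lookup itself is not shown in the listing.

static void xive_cause_ipi(int cpu)
{
	struct xive_cpu *xc = per_cpu(xive_cpu, cpu);	/* assumed lookup */
	struct xive_irq_data *xd;

	xd = &xc->ipi_data;
	if (WARN_ON(!xd->trig_mmio))
		return;
	/* Any store to the trigger page fires the IPI source. */
	out_be64(xd->trig_mmio, 0);
}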
1334 struct xive_irq_data *xd; in xive_irq_domain_debug_show() local
1348 xd = irq_data_get_irq_handler_data(irqd); in xive_irq_domain_debug_show()
1349 if (!xd) { in xive_irq_domain_debug_show()
1354 val = xive_esb_read(xd, XIVE_ESB_GET); in xive_irq_domain_debug_show()
1356 seq_printf(m, "%*sPstate: %s %s\n", ind, "", xd->stale_p ? "stale" : "", in xive_irq_domain_debug_show()
1357 xd->saved_p ? "saved" : ""); in xive_irq_domain_debug_show()
1358 seq_printf(m, "%*sTarget: %d\n", ind, "", xd->target); in xive_irq_domain_debug_show()
1359 seq_printf(m, "%*sChip: %d\n", ind, "", xd->src_chip); in xive_irq_domain_debug_show()
1360 seq_printf(m, "%*sTrigger: 0x%016llx\n", ind, "", xd->trig_page); in xive_irq_domain_debug_show()
1361 seq_printf(m, "%*sEOI: 0x%016llx\n", ind, "", xd->eoi_page); in xive_irq_domain_debug_show()
1362 seq_printf(m, "%*sFlags: 0x%llx\n", ind, "", xd->flags); in xive_irq_domain_debug_show()
1364 if (xd->flags & xive_irq_flags[i].mask) in xive_irq_domain_debug_show()
1544 struct xive_irq_data *xd; in xive_flush_cpu_queue() local
1563 xd = irq_desc_get_handler_data(desc); in xive_flush_cpu_queue()
1568 xd->saved_p = false; in xive_flush_cpu_queue()
1574 if (xd->flags & XIVE_IRQ_FLAG_LSI) in xive_flush_cpu_queue()
1575 xive_do_source_eoi(xd); in xive_flush_cpu_queue()
1735 struct xive_irq_data *xd; in xive_debug_show_irq() local
1747 xd = irq_data_get_irq_handler_data(d); in xive_debug_show_irq()
1748 val = xive_esb_read(xd, XIVE_ESB_GET); in xive_debug_show_irq()
1750 xd->flags & XIVE_IRQ_FLAG_STORE_EOI ? 'S' : ' ', in xive_debug_show_irq()
1751 xd->flags & XIVE_IRQ_FLAG_LSI ? 'L' : ' ', in xive_debug_show_irq()
1752 xd->flags & XIVE_IRQ_FLAG_H_INT_ESB ? 'H' : ' ', in xive_debug_show_irq()