Searched refs:cq_base (Results 1 – 18 of 18) sorted by relevance
/linux/drivers/misc/habanalabs/common/

irq.c
     84  struct hl_cq_entry *cq_entry, *cq_base;                    in hl_irq_handler_cq() [local]
     93  cq_base = cq->kernel_address;                              in hl_irq_handler_cq()
     96  bool entry_ready = ((le32_to_cpu(cq_base[cq->ci].data) &   in hl_irq_handler_cq()
    103  cq_entry = (struct hl_cq_entry *) &cq_base[cq->ci];        in hl_irq_handler_cq()

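The habanalabs hit shows the idiom most of the results below share: cq_base points at a ring of fixed-size completion entries, a consumer index (cq->ci) walks that ring, and a flag in each entry tells the handler whether the device has written it yet. A minimal userspace sketch of that polling pattern, using invented names (cq_entry, cq_poll_one, CQ_READY_BIT) rather than the driver's:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define CQ_LEN       16            /* entries in the ring (hypothetical) */
#define CQ_READY_BIT 0x80000000u   /* hypothetical "entry ready" flag in .data */

/* Hypothetical completion-queue entry; real drivers define this per hardware. */
struct cq_entry {
	uint32_t data;                 /* flag + completion info written by the device */
};

struct cq {
	struct cq_entry *cq_base;      /* base of the ring, normally DMA-coherent memory */
	unsigned int ci;               /* consumer index, owned by the driver */
};

/* Poll one entry: return true and advance ci if the device marked it ready. */
static bool cq_poll_one(struct cq *cq, uint32_t *out)
{
	struct cq_entry *e = &cq->cq_base[cq->ci];

	if (!(e->data & CQ_READY_BIT))
		return false;              /* nothing new at the consumer index */

	*out = e->data & ~CQ_READY_BIT;
	e->data = 0;                   /* hand the slot back to the device */
	cq->ci = (cq->ci + 1) % CQ_LEN;
	return true;
}

int main(void)
{
	struct cq cq = { .cq_base = calloc(CQ_LEN, sizeof(struct cq_entry)), .ci = 0 };
	uint32_t v;

	if (!cq.cq_base)
		return 1;

	cq.cq_base[0].data = CQ_READY_BIT | 42;   /* pretend the device completed work #42 */
	if (cq_poll_one(&cq, &v))
		printf("completion: %u, new ci=%u\n", v, cq.ci);

	free(cq.cq_base);
	return 0;
}
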
/linux/drivers/infiniband/hw/irdma/

user.h
    310  void irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int size);
    369  struct irdma_cqe *cq_base;   [member]
    404  struct irdma_cqe *cq_base;   [member]

uk.c
    926  void irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int cq_size)   in irdma_uk_cq_resize() [argument]
    928  cq->cq_base = cq_base;                                                        in irdma_uk_cq_resize()
   1041  ext_cqe = cq->cq_base[peek_head].buf;                                         in irdma_uk_cq_poll_cmpl()
   1452  cq->cq_base = info->cq_base;                                                  in irdma_uk_cq_init()
   1479  cqe = ((struct irdma_extended_cqe *)(cq->cq_base))[cq_head].buf;              in irdma_uk_clean_cq()
   1481  cqe = cq->cq_base[cq_head].buf;                                               in irdma_uk_clean_cq()

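At this level irdma_uk_cq_resize() appears to amount to repointing cq_base at a new CQE array and recording the new size; the surrounding driver code allocates, registers, and retires the buffers. A rough sketch of that repointing step, using made-up stand-in types (my_cqe, my_cq) rather than irdma's:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical CQE and CQ types standing in for irdma_cqe / irdma_cq_uk. */
struct my_cqe { unsigned long long buf[4]; };

struct my_cq {
	struct my_cqe *cq_base;   /* current CQE array */
	int            cq_size;   /* number of CQEs in the array */
	unsigned int   head;      /* consumer position, reset on resize */
};

/* Point the CQ at a freshly allocated (and device-registered) CQE array. */
static void my_cq_resize(struct my_cq *cq, void *cq_base, int cq_size)
{
	cq->cq_base = cq_base;
	cq->cq_size = cq_size;
	cq->head = 0;             /* start consuming from the top of the new ring */
}

int main(void)
{
	struct my_cq cq = { 0 };
	struct my_cqe *small = calloc(64, sizeof(*small));
	struct my_cqe *big   = calloc(256, sizeof(*big));

	if (!small || !big)
		return 1;

	my_cq_resize(&cq, small, 64);
	printf("cq_base=%p size=%d\n", (void *)cq.cq_base, cq.cq_size);

	/* Grow the CQ: the old buffer is only freed once the hardware has switched over. */
	my_cq_resize(&cq, big, 256);
	printf("cq_base=%p size=%d\n", (void *)cq.cq_base, cq.cq_size);

	free(small);
	free(big);
	return 0;
}
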
defs.h
    941  (_cq)->cq_base[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \
    946  ((_cq)->cq_base))[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \

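The defs.h macros fetch the CQE at the ring's current head straight through cq_base, with one variant casting to the extended-CQE layout. A toy version of that head-indexing macro, with assumed names (RING_CURRENT_HEAD, GET_CQ_ELEM) and invented CQE sizes:

#include <stdio.h>

/* Hypothetical 64-byte vs 128-byte CQE layouts, mirroring base vs extended CQEs. */
struct cqe     { unsigned long long buf[8];  };
struct ext_cqe { unsigned long long buf[16]; };

struct ring { unsigned int head; unsigned int size; };
struct cq   { struct cqe *cq_base; struct ring cq_ring; };

#define RING_CURRENT_HEAD(_r)  ((_r).head)

/* Current head element, read either as a base CQE or as an extended CQE. */
#define GET_CQ_ELEM(_cq)      ((_cq)->cq_base[RING_CURRENT_HEAD((_cq)->cq_ring)].buf)
#define GET_EXT_CQ_ELEM(_cq)  (((struct ext_cqe *)((_cq)->cq_base))[RING_CURRENT_HEAD((_cq)->cq_ring)].buf)

int main(void)
{
	struct cqe ring_mem[32] = { 0 };
	struct cq cq = { .cq_base = ring_mem, .cq_ring = { .head = 3, .size = 32 } };

	ring_mem[3].buf[0] = 0xabcd;
	printf("head cqe word: %llx\n", GET_CQ_ELEM(&cq)[0]);
	return 0;
}
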
type.h
    665  struct irdma_cqe *cq_base;   [member]
    809  struct irdma_cqe *cq_base;   [member]

puda.c
    243  ext_cqe = cq_uk->cq_base[peek_head].buf;   in irdma_puda_poll_info()
    804  init_info->cq_base = mem->va;              in irdma_puda_cq_create()

ctrl.c
   2633  irdma_uk_cq_resize(&cq->cq_uk, info->cq_base, info->cq_size);   in irdma_sc_cq_resize()
   4176  cq->cq_uk.cq_base = info->cq_base;                              in irdma_sc_ccq_init()

verbs.c
   1857  info.cq_base = kmem_buf.va;        in irdma_resize_cq()
   2067  ukinfo->cq_base = iwcq->kmem.va;   in irdma_create_cq()

hw.c
   1028  info.cq_base = ccq->mem_cq.va;   in irdma_create_ccq()

/linux/drivers/net/ethernet/pensando/ionic/

ionic_lif.c
    381  if (qcq->cq_base) {                                                     in ionic_qcq_free()
    382  dma_free_coherent(dev, qcq->cq_size, qcq->cq_base, qcq->cq_base_pa);    in ionic_qcq_free()
    383  qcq->cq_base = NULL;                                                    in ionic_qcq_free()
    512  void *q_base, *cq_base, *sg_base;                                       in ionic_qcq_alloc() [local]
    586  cq_base = PTR_ALIGN(q_base + q_size, PAGE_SIZE);                        in ionic_qcq_alloc()
    588  ionic_cq_map(&new->cq, cq_base, cq_base_pa);                            in ionic_qcq_alloc()
    606  if (!new->cq_base) {                                                    in ionic_qcq_alloc()
    611  cq_base = PTR_ALIGN(new->cq_base, PAGE_SIZE);                           in ionic_qcq_alloc()
    613  ionic_cq_map(&new->cq, cq_base, cq_base_pa);                            in ionic_qcq_alloc()
    722  memset(qcq->cq_base, 0, qcq->cq_size);                                  in ionic_qcq_sanitize()
    [all …]

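ionic_qcq_alloc() shows two placement strategies for cq_base: carve it out of the same DMA-coherent block as the descriptor queue by page-aligning past q_base + q_size, or give the CQ its own allocation and align its start before mapping it. A plain-C sketch of the first, shared-block case, with a PTR_ALIGN look-alike and hypothetical sizes (nothing here beyond the carving idea is taken from the driver):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SZ 4096u   /* stands in for the kernel's PAGE_SIZE */

/* Round a pointer up to the next multiple of 'a' (a power of two), like PTR_ALIGN. */
#define PTR_ALIGN_UP(p, a) \
	((void *)(((uintptr_t)(p) + ((a) - 1)) & ~((uintptr_t)(a) - 1)))

int main(void)
{
	size_t q_size  = 10000;   /* descriptor ring bytes (hypothetical) */
	size_t cq_size = 8000;    /* completion ring bytes (hypothetical) */

	/* One backing allocation; in the driver this is dma_alloc_coherent(). Extra
	 * PAGE_SZ covers the padding lost to aligning cq_base up to a page, and the
	 * total is rounded up because aligned_alloc() wants a multiple of the alignment. */
	size_t total = q_size + cq_size + PAGE_SZ;
	total = (total + PAGE_SZ - 1) & ~((size_t)PAGE_SZ - 1);

	void *base = aligned_alloc(PAGE_SZ, total);
	if (!base)
		return 1;

	void *q_base  = base;
	void *cq_base = PTR_ALIGN_UP((char *)q_base + q_size, PAGE_SZ);

	printf("q_base=%p cq_base=%p (cq offset %zu)\n",
	       q_base, cq_base, (size_t)((char *)cq_base - (char *)q_base));

	free(base);
	return 0;
}
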
ionic_lif.h
     67  void *cq_base;   [member]

/linux/Documentation/networking/device_drivers/qlogic/

qlge.rst
     79  .cq_base = (void *)0x0,

/linux/drivers/staging/qlge/

qlge_main.c
    987  rx_ring->curr_entry = rx_ring->cq_base;    in qlge_update_cq()
   2840  if (rx_ring->cq_base) {                    in qlge_free_rx_resources()
   2843  rx_ring->cq_base, rx_ring->cq_base_dma);   in qlge_free_rx_resources()
   2844  rx_ring->cq_base = NULL;                   in qlge_free_rx_resources()
   2857  rx_ring->cq_base =                         in qlge_alloc_rx_resources()
   2861  if (!rx_ring->cq_base) {                   in qlge_alloc_rx_resources()
   2978  rx_ring->curr_entry = rx_ring->cq_base;    in qlge_start_rx_ring()

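qlge tracks its position with a curr_entry pointer rather than an index, and the snippets suggest it is snapped back to cq_base when the ring is (re)started or wraps. A tiny sketch of that pointer-cursor style, with placeholder types and names:

#include <stdio.h>

struct cqe { unsigned int data; };

struct rx_ring {
	struct cqe *cq_base;      /* start of the completion ring */
	struct cqe *curr_entry;   /* pointer-style cursor instead of an index */
	unsigned int cq_len;
};

/* Advance the cursor, snapping back to cq_base at the end of the ring. */
static void advance(struct rx_ring *r)
{
	r->curr_entry++;
	if (r->curr_entry == r->cq_base + r->cq_len)
		r->curr_entry = r->cq_base;
}

int main(void)
{
	struct cqe ring[4] = { 0 };
	struct rx_ring r = { .cq_base = ring, .curr_entry = ring, .cq_len = 4 };

	for (int i = 0; i < 5; i++)
		advance(&r);
	printf("cursor offset after 5 steps: %ld\n", (long)(r.curr_entry - r.cq_base));
	return 0;
}
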
qlge.h
   1463  void *cq_base;   [member]

/linux/drivers/infiniband/hw/bnxt_re/

qplib_fp.h
    343  #define BNXT_QPLIB_MAX_CQE_ENTRY_SIZE sizeof(struct cq_base)

qplib_fp.c
   1422  struct cq_base *hw_cqe;                     in __clean_cq()
   2049  hwq_attr.stride = sizeof(struct cq_base);   in bnxt_qplib_create_cq()
   2237  struct cq_base *peek_hwcqe;                 in do_wa9060()
   2603  struct cq_base *hw_cqe;                     in bnxt_qplib_is_cq_empty()
   2855  struct cq_base *hw_cqe;                     in bnxt_qplib_poll_cq()

ib_verbs.c
   2850  entries * sizeof(struct cq_base),       in bnxt_re_create_cq()
   3883  resp.cqe_sz = sizeof(struct cq_base);   in bnxt_re_alloc_ucontext()

roce_hsi.h
    546  struct cq_base {   [struct]

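For bnxt_re, struct cq_base (defined in roce_hsi.h) is the hardware CQE layout itself, so sizeof(struct cq_base) serves as the ring stride (hwq_attr.stride), the per-entry size reported to userspace (resp.cqe_sz), and the factor in the CQ allocation size (entries * sizeof(struct cq_base)). A small sketch of that sizing arithmetic, using an invented 32-byte stand-in layout rather than the real structure:

#include <stdio.h>
#include <stdlib.h>

/* Invented stand-in for the hardware CQE; the real struct cq_base lives in roce_hsi.h. */
struct cq_base_sketch {
	unsigned long long wr_id;
	unsigned int       flags_type;
	unsigned int       status;
	unsigned long long reserved[2];
};

int main(void)
{
	unsigned int entries = 1024;                      /* requested CQ depth */
	size_t stride = sizeof(struct cq_base_sketch);    /* plays the role of hwq_attr.stride */
	size_t ring_bytes = (size_t)entries * stride;     /* entries * sizeof(struct cq_base) */

	struct cq_base_sketch *ring = calloc(entries, stride);
	if (!ring)
		return 1;

	printf("cqe_sz=%zu ring_bytes=%zu\n", stride, ring_bytes);
	free(ring);
	return 0;
}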