Searched refs:pd_addr (Results 1 – 25 of 49) sorted by relevance

/linux/drivers/net/ethernet/intel/i40e/
i40e_hmc.c
120 u64 *pd_addr; in i40e_add_pd_table_entry() local
158 pd_addr = (u64 *)pd_table->pd_page_addr.va; in i40e_add_pd_table_entry()
159 pd_addr += rel_pd_idx; in i40e_add_pd_table_entry()
162 memcpy(pd_addr, &page_desc, sizeof(u64)); in i40e_add_pd_table_entry()
197 u64 *pd_addr; in i40e_remove_pd_bp() local
223 pd_addr = (u64 *)pd_table->pd_page_addr.va; in i40e_remove_pd_bp()
224 pd_addr += rel_pd_idx; in i40e_remove_pd_bp()
225 memset(pd_addr, 0, sizeof(u64)); in i40e_remove_pd_bp()
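
Both HMC hits above follow one pattern (irdma's hmc.c further down in these results does the same): pd_page_addr.va is the CPU mapping of a page-directory page, rel_pd_idx selects one 64-bit slot in it, and the slot is either filled with a page descriptor via memcpy or cleared via memset. A minimal stand-alone sketch of that indexing, with a hypothetical simplified struct in place of the driver's pd_table:

#include <stdint.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>

/* Hypothetical stand-in for the driver's pd_table: one page of u64 slots. */
struct pd_table_sketch {
	void *va;   /* CPU-visible mapping of the page-directory page */
};

/* Fill one slot, as in i40e_add_pd_table_entry()/irdma_add_pd_table_entry(). */
static void pd_slot_set(struct pd_table_sketch *t, unsigned rel_pd_idx,
			uint64_t page_desc)
{
	uint64_t *pd_addr = (uint64_t *)t->va;

	pd_addr += rel_pd_idx;                      /* select the slot */
	memcpy(pd_addr, &page_desc, sizeof(*pd_addr));
}

/* Clear one slot, as in i40e_remove_pd_bp()/irdma_remove_pd_bp(). */
static void pd_slot_clear(struct pd_table_sketch *t, unsigned rel_pd_idx)
{
	uint64_t *pd_addr = (uint64_t *)t->va;

	pd_addr += rel_pd_idx;
	memset(pd_addr, 0, sizeof(uint64_t));
}

int main(void)
{
	struct pd_table_sketch t = { .va = calloc(512, sizeof(uint64_t)) };

	pd_slot_set(&t, 3, 0x123456789000ULL | 1);  /* made-up descriptor value */
	printf("slot 3 = %#llx\n",
	       (unsigned long long)((uint64_t *)t.va)[3]);
	pd_slot_clear(&t, 3);
	free(t.va);
	return 0;
}
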
/linux/drivers/gpu/drm/radeon/
radeon_trace.h
108 TP_PROTO(uint64_t pd_addr, unsigned ring, unsigned id),
109 TP_ARGS(pd_addr, ring, id),
111 __field(u64, pd_addr)
117 __entry->pd_addr = pd_addr;
122 __entry->pd_addr, __entry->ring, __entry->id)
radeon_vm.c
241 uint64_t pd_addr = radeon_bo_gpu_offset(vm->page_directory); in radeon_vm_flush() local
244 if (pd_addr != vm_id->pd_gpu_addr || !vm_id->flushed_updates || in radeon_vm_flush()
247 trace_radeon_vm_flush(pd_addr, ring, vm->ids[ring].id); in radeon_vm_flush()
250 vm_id->pd_gpu_addr = pd_addr; in radeon_vm_flush()
644 uint64_t pd_addr = radeon_bo_gpu_offset(pd); in radeon_vm_update_page_directory() local
679 pde = pd_addr + pt_idx * 8; in radeon_vm_update_page_directory()
si_dma.c
187 unsigned vm_id, uint64_t pd_addr) in si_dma_vm_flush() argument
196 radeon_ring_write(ring, pd_addr >> 12); in si_dma_vm_flush()
ni_dma.c
449 unsigned vm_id, uint64_t pd_addr) in cayman_dma_vm_flush() argument
453 radeon_ring_write(ring, pd_addr >> 12); in cayman_dma_vm_flush()
radeon_asic.h
614 unsigned vm_id, uint64_t pd_addr);
640 unsigned vm_id, uint64_t pd_addr);
722 unsigned vm_id, uint64_t pd_addr);
745 unsigned vm_id, uint64_t pd_addr);
829 unsigned vm_id, uint64_t pd_addr);
848 unsigned vm_id, uint64_t pd_addr);
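
Two uses of the directory's GPU address show up in the radeon hits: the DMA flush paths (si_dma.c, ni_dma.c) program pd_addr >> 12, i.e. the page-frame number of the directory, and radeon_vm_update_page_directory() addresses individual 8-byte entries at pd_addr + pt_idx * 8. A hypothetical userspace sketch of that arithmetic, with a plain array standing in for the command ring:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for a command ring: just collect the dwords. */
static uint32_t ring[8];
static unsigned ring_wptr;

static void ring_write(uint32_t v)
{
	ring[ring_wptr++] = v;
}

int main(void)
{
	uint64_t pd_addr = 0x0000000812345000ULL;   /* page-aligned GPU address */

	/* Flush path: the hardware takes the directory base as a page number. */
	ring_write((uint32_t)(pd_addr >> 12));

	/* Update path: each page-directory entry is 8 bytes wide. */
	for (unsigned pt_idx = 0; pt_idx < 4; pt_idx++) {
		uint64_t pde = pd_addr + pt_idx * 8;
		printf("PDE %u lives at GPU address %#llx\n",
		       pt_idx, (unsigned long long)pde);
	}
	printf("ring[0] = %#x (pd_addr >> 12)\n", ring[0]);
	return 0;
}
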
/linux/drivers/infiniband/hw/irdma/
hmc.c
562 u64 *pd_addr; in irdma_add_pd_table_entry() local
596 pd_addr = pd_table->pd_page_addr.va; in irdma_add_pd_table_entry()
597 pd_addr += rel_pd_idx; in irdma_add_pd_table_entry()
598 memcpy(pd_addr, &page_desc, sizeof(*pd_addr)); in irdma_add_pd_table_entry()
633 u64 *pd_addr; in irdma_remove_pd_bp() local
651 pd_addr = pd_table->pd_page_addr.va; in irdma_remove_pd_bp()
652 pd_addr += rel_pd_idx; in irdma_remove_pd_bp()
653 memset(pd_addr, 0, sizeof(u64)); in irdma_remove_pd_bp()
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_trace.h
226 __field(u64, pd_addr)
235 __entry->pd_addr = job->vm_pd_addr;
240 __entry->vm_hub, __entry->pd_addr, __entry->needs_flush)
417 uint64_t pd_addr),
418 TP_ARGS(ring, vmid, pd_addr),
423 __field(u64, pd_addr)
430 __entry->pd_addr = pd_addr;
434 __entry->vm_hub,__entry->pd_addr)
vcn_v2_0.h
37 unsigned vmid, uint64_t pd_addr);
50 unsigned int vmid, uint64_t pd_addr);
amdgpu_gmc.c
125 uint64_t pd_addr; in amdgpu_gmc_pd_addr() local
131 amdgpu_gmc_get_pde_for_bo(bo, -1, &pd_addr, &flags); in amdgpu_gmc_pd_addr()
132 pd_addr |= flags; in amdgpu_gmc_pd_addr()
134 pd_addr = amdgpu_bo_gpu_offset(bo); in amdgpu_gmc_pd_addr()
136 return pd_addr; in amdgpu_gmc_pd_addr()
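
amdgpu_gmc_pd_addr() above builds the value later consumed by the VM flush: where the hardware expects a full PDE, the directory address from amdgpu_gmc_get_pde_for_bo() has its flag bits ORed in; otherwise the raw GPU offset of the buffer object is returned. A small sketch of that address-plus-flags packing; the flag values and the wants_full_pde switch are made up for illustration:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define SKETCH_PDE_VALID  (1ULL << 0)   /* hypothetical flag bit */

static uint64_t gmc_pd_addr_sketch(uint64_t bo_gpu_offset, bool wants_full_pde)
{
	uint64_t pd_addr = bo_gpu_offset;

	if (wants_full_pde) {
		/* stand-in for what amdgpu_gmc_get_pde_for_bo() would report */
		uint64_t flags = SKETCH_PDE_VALID;

		pd_addr |= flags;
	}
	return pd_addr;
}

int main(void)
{
	printf("%#llx\n", (unsigned long long)gmc_pd_addr_sketch(0x800000000ULL, true));
	printf("%#llx\n", (unsigned long long)gmc_pd_addr_sketch(0x800000000ULL, false));
	return 0;
}
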
jpeg_v1_0.c
377 unsigned vmid, uint64_t pd_addr) in jpeg_v1_0_decode_ring_emit_vm_flush() argument
382 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in jpeg_v1_0_decode_ring_emit_vm_flush()
386 data1 = lower_32_bits(pd_addr); in jpeg_v1_0_decode_ring_emit_vm_flush()
jpeg_v2_0.c
610 unsigned vmid, uint64_t pd_addr) in jpeg_v2_0_dec_ring_emit_vm_flush() argument
615 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in jpeg_v2_0_dec_ring_emit_vm_flush()
619 data1 = lower_32_bits(pd_addr); in jpeg_v2_0_dec_ring_emit_vm_flush()
jpeg_v2_0.h
56 unsigned vmid, uint64_t pd_addr);
uvd_v7_0.c
1396 unsigned vmid, uint64_t pd_addr) in uvd_v7_0_ring_emit_vm_flush() argument
1401 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in uvd_v7_0_ring_emit_vm_flush()
1405 data1 = lower_32_bits(pd_addr); in uvd_v7_0_ring_emit_vm_flush()
1439 unsigned int vmid, uint64_t pd_addr) in uvd_v7_0_enc_ring_emit_vm_flush() argument
1443 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in uvd_v7_0_enc_ring_emit_vm_flush()
1448 lower_32_bits(pd_addr), 0xffffffff); in uvd_v7_0_enc_ring_emit_vm_flush()
vcn_v1_0.c
1545 unsigned vmid, uint64_t pd_addr) in vcn_v1_0_dec_ring_emit_vm_flush() argument
1550 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vcn_v1_0_dec_ring_emit_vm_flush()
1554 data1 = lower_32_bits(pd_addr); in vcn_v1_0_dec_ring_emit_vm_flush()
1690 unsigned int vmid, uint64_t pd_addr) in vcn_v1_0_enc_ring_emit_vm_flush() argument
1694 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vcn_v1_0_enc_ring_emit_vm_flush()
1699 lower_32_bits(pd_addr), 0xffffffff); in vcn_v1_0_enc_ring_emit_vm_flush()
vcn_v2_0.c
1502 unsigned vmid, uint64_t pd_addr) in vcn_v2_0_dec_ring_emit_vm_flush() argument
1507 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vcn_v2_0_dec_ring_emit_vm_flush()
1511 data1 = lower_32_bits(pd_addr); in vcn_v2_0_dec_ring_emit_vm_flush()
1662 unsigned int vmid, uint64_t pd_addr) in vcn_v2_0_enc_ring_emit_vm_flush() argument
1666 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vcn_v2_0_enc_ring_emit_vm_flush()
1671 lower_32_bits(pd_addr), 0xffffffff); in vcn_v2_0_enc_ring_emit_vm_flush()
gmc_v10_0.c
465 unsigned vmid, uint64_t pd_addr) in gmc_v10_0_emit_flush_gpu_tlb() argument
488 lower_32_bits(pd_addr)); in gmc_v10_0_emit_flush_gpu_tlb()
492 upper_32_bits(pd_addr)); in gmc_v10_0_emit_flush_gpu_tlb()
509 return pd_addr; in gmc_v10_0_emit_flush_gpu_tlb()
gmc_v6_0.c
362 unsigned vmid, uint64_t pd_addr) in gmc_v6_0_emit_flush_gpu_tlb() argument
371 amdgpu_ring_emit_wreg(ring, reg, pd_addr >> 12); in gmc_v6_0_emit_flush_gpu_tlb()
376 return pd_addr; in gmc_v6_0_emit_flush_gpu_tlb()
vce_v4_0.c
1022 unsigned int vmid, uint64_t pd_addr) in vce_v4_0_emit_vm_flush() argument
1026 pd_addr = amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in vce_v4_0_emit_vm_flush()
1031 lower_32_bits(pd_addr), 0xffffffff); in vce_v4_0_emit_vm_flush()
gmc_v9_0.c
936 unsigned vmid, uint64_t pd_addr) in gmc_v9_0_emit_flush_gpu_tlb() argument
960 lower_32_bits(pd_addr)); in gmc_v9_0_emit_flush_gpu_tlb()
964 upper_32_bits(pd_addr)); in gmc_v9_0_emit_flush_gpu_tlb()
981 return pd_addr; in gmc_v9_0_emit_flush_gpu_tlb()
uvd_v6_0.c
1072 unsigned vmid, uint64_t pd_addr) in uvd_v6_0_ring_emit_vm_flush() argument
1074 amdgpu_gmc_emit_flush_gpu_tlb(ring, vmid, pd_addr); in uvd_v6_0_ring_emit_vm_flush()
1132 unsigned int vmid, uint64_t pd_addr) in uvd_v6_0_enc_ring_emit_vm_flush() argument
1136 amdgpu_ring_write(ring, pd_addr >> 12); in uvd_v6_0_enc_ring_emit_vm_flush()
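
The amdgpu ring backends above (JPEG, UVD, VCN, VCE, and the GMC blocks they call into) share one shape: the GMC hook may rewrite pd_addr while emitting the TLB-flush packets, and the ring then splits the returned value into 32-bit halves for register writes or register waits. A hedged sketch of that control flow, with a trivial stand-in for amdgpu_gmc_emit_flush_gpu_tlb():

#include <stdint.h>
#include <stdio.h>

static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* Hypothetical stand-in: the real hook emits flush packets on the ring and
 * may return a modified pd_addr (e.g. with PDE flags folded in). */
static uint64_t emit_flush_gpu_tlb_sketch(unsigned vmid, uint64_t pd_addr)
{
	(void)vmid;
	return pd_addr | 1;     /* pretend a "valid" bit is added */
}

static void ring_emit_vm_flush_sketch(unsigned vmid, uint64_t pd_addr)
{
	pd_addr = emit_flush_gpu_tlb_sketch(vmid, pd_addr);

	/* The ring then programs/polls the low and high halves separately. */
	printf("write VM%u base lo = %#x\n", vmid, lower_32_bits(pd_addr));
	printf("write VM%u base hi = %#x\n", vmid, upper_32_bits(pd_addr));
	printf("wait  VM%u base lo == %#x (mask 0xffffffff)\n",
	       vmid, lower_32_bits(pd_addr));
}

int main(void)
{
	ring_emit_vm_flush_sketch(3, 0x0000000812345000ULL);
	return 0;
}
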
/linux/drivers/clk/mediatek/
clk-pll.c
42 void __iomem *pd_addr; member
126 val = readl(pll->pd_addr); in mtk_pll_set_rate_regs()
131 if (pll->pd_addr != pll->pcw_addr) { in mtk_pll_set_rate_regs()
132 writel(val, pll->pd_addr); in mtk_pll_set_rate_regs()
218 postdiv = (readl(pll->pd_addr) >> pll->data->pd_shift) & POSTDIV_MASK; in mtk_pll_recalc_rate()
328 pll->pd_addr = base + data->pd_reg; in mtk_clk_register_pll()
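
The MediaTek PLL driver uses pd_addr differently: it is the MMIO address of the register holding the post-divider field (base + pd_reg). A sketch of the read-modify-write done on rate changes and of the field extraction done in mtk_pll_recalc_rate(), with a plain variable simulating the register instead of readl()/writel(); the shift value below is made up:

#include <stdint.h>
#include <stdio.h>

#define POSTDIV_MASK 0x7   /* 3-bit post-divider field, as in clk-pll.c */

/* Simulated register backing store; the driver would use readl()/writel()
 * on pll->pd_addr = base + data->pd_reg instead. */
static uint32_t pd_reg_sim = 0x80000012;

static void set_postdiv_sketch(unsigned pd_shift, uint32_t postdiv)
{
	uint32_t val = pd_reg_sim;                       /* readl(pll->pd_addr) */

	val &= ~((uint32_t)POSTDIV_MASK << pd_shift);    /* clear old field */
	val |= (postdiv & POSTDIV_MASK) << pd_shift;     /* insert new value */
	pd_reg_sim = val;                                /* writel(val, pll->pd_addr) */
}

static uint32_t get_postdiv_sketch(unsigned pd_shift)
{
	/* mirrors mtk_pll_recalc_rate(): (readl(pd_addr) >> pd_shift) & POSTDIV_MASK */
	return (pd_reg_sim >> pd_shift) & POSTDIV_MASK;
}

int main(void)
{
	unsigned pd_shift = 24;   /* hypothetical field position */

	set_postdiv_sketch(pd_shift, 2);
	printf("postdiv = %u\n", get_postdiv_sketch(pd_shift));
	return 0;
}
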
/linux/drivers/gpu/drm/i915/gt/
gen6_ppgtt.c
23 ppgtt->pd_addr + pde); in gen6_write_pde()
161 ioread32(ppgtt->pd_addr + pde - 1); in gen6_flush_pd()
300 ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset; in pd_vma_bind()
gen6_ppgtt.h
18 gen6_pte_t __iomem *pd_addr; member
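
In i915's gen6 PPGTT, pd_addr is not a GPU address but a CPU __iomem window into the GTT (ggtt->gsm + ggtt_offset): page-directory entries are written through pd_addr + pde and a trailing read in gen6_flush_pd() posts the writes. A plain-memory sketch of that write-then-read-back pattern; the array, directory size, and PDE encoding are stand-ins:

#include <stdint.h>
#include <stdio.h>

#define NUM_PDE 512   /* hypothetical directory size */

/* Stand-in for the __iomem window at ggtt->gsm + ggtt_offset. */
static uint32_t gsm_window[NUM_PDE];

int main(void)
{
	volatile uint32_t *pd_addr = gsm_window;
	unsigned pde = 7;
	uint32_t pde_val = 0x12345001;   /* made-up PDE encoding */

	/* gen6_write_pde(): store the entry through the iomem window. */
	pd_addr[pde] = pde_val;          /* iowrite32(..., ppgtt->pd_addr + pde) */

	/* gen6_flush_pd(): read back so the posted writes land before use. */
	(void)pd_addr[pde];              /* ioread32(ppgtt->pd_addr + pde) */

	printf("PDE %u = %#x\n", pde, gsm_window[pde]);
	return 0;
}
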
/linux/drivers/gpu/drm/bridge/analogix/
analogix_dp_reg.c
235 u32 pd_addr = ANALOGIX_DP_PLL_CTL; in analogix_dp_set_pll_power_down() local
238 pd_addr = ANALOGIX_DP_PD; in analogix_dp_set_pll_power_down()
242 reg = readl(dp->reg_base + pd_addr); in analogix_dp_set_pll_power_down()
247 writel(reg, dp->reg_base + pd_addr); in analogix_dp_set_pll_power_down()
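
Here pd_addr is simply a register offset: it defaults to ANALOGIX_DP_PLL_CTL and is switched to ANALOGIX_DP_PD for hardware that keeps the PLL power-down bit there, after which the register at dp->reg_base + pd_addr is read-modify-written. A sketch of that offset selection and bit update, with made-up offsets and bit position standing in for the real register map:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical register offsets and bit; the real values live in the
 * driver's register header. */
#define SKETCH_DP_PLL_CTL  0x71c
#define SKETCH_DP_PD       0x720
#define SKETCH_PLL_PD_BIT  (1u << 7)

/* Simulated 4 KiB register block instead of dp->reg_base. */
static uint32_t regs[0x1000 / 4];

static void set_pll_power_down_sketch(bool has_pd_reg, bool enable)
{
	uint32_t pd_addr = SKETCH_DP_PLL_CTL;
	uint32_t reg;

	if (has_pd_reg)
		pd_addr = SKETCH_DP_PD;          /* newer IP keeps the bit here */

	reg = regs[pd_addr / 4];                 /* readl(dp->reg_base + pd_addr) */
	if (enable)
		reg |= SKETCH_PLL_PD_BIT;
	else
		reg &= ~SKETCH_PLL_PD_BIT;
	regs[pd_addr / 4] = reg;                 /* writel(reg, dp->reg_base + pd_addr) */
}

int main(void)
{
	set_pll_power_down_sketch(true, true);
	printf("PD reg = %#x\n", regs[SKETCH_DP_PD / 4]);
	return 0;
}
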
