/linux/drivers/gpu/drm/amd/amdkfd/

  kfd_packet_manager_v9.c
      65   packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);  in pm_map_process_v9()
      69   lower_32_bits(vm_page_table_base_addr);  in pm_map_process_v9()
      104  packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);  in pm_map_process_aldebaran()
      108  lower_32_bits(vm_page_table_base_addr);  in pm_map_process_aldebaran()
      147  packet->ordinal2 = lower_32_bits(ib);  in pm_runlist_v9()
      172  packet->gws_mask_lo = lower_32_bits(res->gws_mask);  in pm_set_resources_v9()
      175  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_v9()
      234  lower_32_bits(q->gart_mqd_addr);  in pm_map_queues_v9()
      240  lower_32_bits((uint64_t)q->properties.write_ptr);  in pm_map_queues_v9()
      343  packet->addr_lo = lower_32_bits((uint64_t)fence_address);  in pm_query_status_v9()
      [all …]

  kfd_packet_manager_vi.c
      68   packet->gds_addr_lo = lower_32_bits(qpd->gds_context_area);  in pm_map_process_vi()
      107  packet->ordinal2 = lower_32_bits(ib);  in pm_runlist_vi()
      132  packet->gws_mask_lo = lower_32_bits(res->gws_mask);  in pm_set_resources_vi()
      135  packet->queue_mask_lo = lower_32_bits(res->queue_mask);  in pm_set_resources_vi()
      185  lower_32_bits(q->gart_mqd_addr);  in pm_map_queues_vi()
      191  lower_32_bits((uint64_t)q->properties.write_ptr);  in pm_map_queues_vi()
      283  packet->addr_lo = lower_32_bits((uint64_t)fence_address);  in pm_query_status_vi()
      285  packet->data_lo = lower_32_bits((uint64_t)fence_value);  in pm_query_status_vi()

  kfd_mqd_manager_vi.c
      116  m->cp_mqd_base_addr_lo = lower_32_bits(addr);  in init_mqd()
      130  m->compute_tba_lo = lower_32_bits(q->tba_addr >> 8);  in init_mqd()
      132  m->compute_tma_lo = lower_32_bits(q->tma_addr >> 8);  in init_mqd()
      142  lower_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
      184  m->cp_hqd_pq_base_lo = lower_32_bits((uint64_t)q->queue_address >> 8);  in __update_mqd()
      187  m->cp_hqd_pq_rptr_report_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in __update_mqd()
      189  m->cp_hqd_pq_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);  in __update_mqd()
      215  lower_32_bits(q->eop_ring_buffer_address >> 8);  in __update_mqd()
      368  m->sdmax_rlcx_rb_base = lower_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
      370  m->sdmax_rlcx_rb_rptr_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()

  kfd_mqd_manager_v10.c
      112  m->cp_mqd_base_addr_lo = lower_32_bits(addr);  in init_mqd()
      128  lower_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
      178  m->cp_hqd_pq_base_lo = lower_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd()
      181  m->cp_hqd_pq_rptr_report_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in update_mqd()
      183  m->cp_hqd_pq_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);  in update_mqd()
      204  lower_32_bits(q->eop_ring_buffer_address >> 8);  in update_mqd()
      343  m->sdmax_rlcx_rb_base = lower_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
      345  m->sdmax_rlcx_rb_rptr_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()

  kfd_mqd_manager_v9.c
      159  m->cp_mqd_base_addr_lo = lower_32_bits(addr);  in init_mqd()
      180  lower_32_bits(q->ctx_save_restore_area_address);  in init_mqd()
      227  m->cp_hqd_pq_base_lo = lower_32_bits((uint64_t)q->queue_address >> 8);  in update_mqd()
      230  m->cp_hqd_pq_rptr_report_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in update_mqd()
      232  m->cp_hqd_pq_wptr_poll_addr_lo = lower_32_bits((uint64_t)q->write_ptr);  in update_mqd()
      255  lower_32_bits(q->eop_ring_buffer_address >> 8);  in update_mqd()
      398  m->sdmax_rlcx_rb_base = lower_32_bits(q->queue_address >> 8);  in update_mqd_sdma()
      400  m->sdmax_rlcx_rb_rptr_addr_lo = lower_32_bits((uint64_t)q->read_ptr);  in update_mqd_sdma()

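The amdkfd packet and MQD managers above all follow the same pattern: a 64-bit GPU address is split across a pair of 32-bit `_lo`/`_hi` packet or register fields with `lower_32_bits()` and `upper_32_bits()`. A minimal self-contained sketch of that split, mirroring the kernel macro definitions as I understand them; the packet struct and field names here are illustrative stand-ins, not the real MAP_PROCESS layout:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

/* Mirrors the kernel macros (include/linux/kernel.h / wordpart.h). */
#define lower_32_bits(n) ((u32)((n) & 0xffffffff))
#define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))  /* double shift avoids UB when n is 32-bit */

/* Hypothetical stand-in for a MAP_PROCESS-style packet with lo/hi pairs. */
struct demo_map_process_packet {
	u32 gds_addr_lo;
	u32 gds_addr_hi;
	u32 vm_context_page_table_base_addr_lo32;
	u32 vm_context_page_table_base_addr_hi32;
};

int main(void)
{
	u64 gds_context_area = 0x0000001234567000ULL;      /* example 64-bit GPU VA */
	u64 vm_page_table_base_addr = 0x0000000fedcba000ULL;
	struct demo_map_process_packet pkt;

	/* Packet fields and registers are 32 bits wide, so each 64-bit
	 * address goes in as a lo/hi pair. */
	pkt.gds_addr_lo = lower_32_bits(gds_context_area);
	pkt.gds_addr_hi = upper_32_bits(gds_context_area);
	pkt.vm_context_page_table_base_addr_lo32 = lower_32_bits(vm_page_table_base_addr);
	pkt.vm_context_page_table_base_addr_hi32 = upper_32_bits(vm_page_table_base_addr);

	printf("gds: lo=0x%08x hi=0x%08x\n", pkt.gds_addr_lo, pkt.gds_addr_hi);
	printf("pt : lo=0x%08x hi=0x%08x\n",
	       pkt.vm_context_page_table_base_addr_lo32,
	       pkt.vm_context_page_table_base_addr_hi32);
	return 0;
}
```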
/linux/drivers/firmware/smccc/

  kvm_guest.c
      32   val[0] = lower_32_bits(res.a0);  in kvm_init_hyp_services()
      33   val[1] = lower_32_bits(res.a1);  in kvm_init_hyp_services()
      34   val[2] = lower_32_bits(res.a2);  in kvm_init_hyp_services()
      35   val[3] = lower_32_bits(res.a3);  in kvm_init_hyp_services()

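In kvm_init_hyp_services() the SMCCC features call returns four registers, and only the low 32 bits of each carry a word of the hypervisor-services bitmap. A user-space sketch of that extraction; the result struct is a stand-in for struct arm_smccc_res and the bit layout is made up for the example:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))

struct demo_smccc_res {            /* stand-in for struct arm_smccc_res */
	u64 a0, a1, a2, a3;
};

int main(void)
{
	struct demo_smccc_res res = { .a0 = 0xdeadbeef00000003ULL };  /* fake firmware reply */
	u32 val[4];

	val[0] = lower_32_bits(res.a0);   /* bitmap bits   0..31  */
	val[1] = lower_32_bits(res.a1);   /* bitmap bits  32..63  */
	val[2] = lower_32_bits(res.a2);   /* bitmap bits  64..95  */
	val[3] = lower_32_bits(res.a3);   /* bitmap bits  96..127 */

	printf("word0=0x%08x: bit0 %s\n", val[0], (val[0] & 1) ? "set" : "clear");
	return 0;
}
```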
/linux/drivers/gpu/drm/amd/amdgpu/

  vcn_v2_0.c
      339   lower_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v2_0_mc_resume()
      902   lower_32_bits(ring->gpu_addr));  in vcn_v2_0_start_dpg_mode()
      913   lower_32_bits(ring->wptr));  in vcn_v2_0_start_dpg_mode()
      1062  lower_32_bits(ring->gpu_addr));  in vcn_v2_0_start()
      1071  lower_32_bits(ring->wptr));  in vcn_v2_0_start()
      1351  lower_32_bits(ring->wptr) | 0x80000000);  in vcn_v2_0_dec_ring_set_wptr()
      1511  data1 = lower_32_bits(pd_addr);  in vcn_v2_0_dec_ring_emit_vm_flush()
      1671  lower_32_bits(pd_addr), 0xffffffff);  in vcn_v2_0_enc_ring_emit_vm_flush()
      1892  lower_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v2_0_start_sriov()
      1944  lower_32_bits(ring->gpu_addr));  in vcn_v2_0_start_sriov()
      [all …]

  vcn_v2_5.c
      399   lower_32_bits(adev->vcn.inst[i].gpu_addr));  in vcn_v2_5_mc_resume()
      410   lower_32_bits(adev->vcn.inst[i].gpu_addr + offset));  in vcn_v2_5_mc_resume()
      426   lower_32_bits(adev->vcn.inst[i].fw_shared_gpu_addr));  in vcn_v2_5_mc_resume()
      881   lower_32_bits(ring->gpu_addr));  in vcn_v2_5_start_dpg_mode()
      892   lower_32_bits(ring->wptr));  in vcn_v2_5_start_dpg_mode()
      1061  lower_32_bits(ring->gpu_addr));  in vcn_v2_5_start()
      1070  lower_32_bits(ring->wptr));  in vcn_v2_5_start()
      1199  lower_32_bits(adev->vcn.inst[i].gpu_addr));  in vcn_v2_5_sriov_start()
      1230  lower_32_bits(adev->vcn.inst[i].gpu_addr + offset +  in vcn_v2_5_sriov_start()
      1249  lower_32_bits(ring->gpu_addr));  in vcn_v2_5_sriov_start()
      [all …]

  si_dma.c
      60    (lower_32_bits(ring->wptr) << 2) & 0x3fffc);  in si_dma_ring_set_wptr()
      72    while ((lower_32_bits(ring->wptr) & 7) != 5)  in si_dma_ring_emit_ib()
      225   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in si_dma_ring_test_ring()
      278   ib.ptr[1] = lower_32_bits(gpu_addr);  in si_dma_ring_test_ib()
      325   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pte()
      326   ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pte()
      349   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_write_pte()
      352   ib->ptr[ib->length_dw++] = lower_32_bits(value);  in si_dma_vm_write_pte()
      432   amdgpu_ring_write(ring, lower_32_bits(addr));  in si_dma_ring_emit_pipeline_sync()
      789   ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);  in si_dma_emit_copy_buffer()
      [all …]

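Several of the amdgpu entries (si_dma, cik_sdma, the sdma_v* files) stream a 64-bit address or value into a dword-based indirect buffer as a low/high pair, low half first. A self-contained sketch of that pattern; the header dword and buffer layout below are invented, not the real SDMA packet format:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))
#define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))

struct demo_ib {
	u32 ptr[16];
	unsigned int length_dw;
};

static void demo_emit_write_pte(struct demo_ib *ib, u64 pe, u64 value)
{
	ib->ptr[ib->length_dw++] = 0x12340002;            /* fake "write PTE" header dword */
	ib->ptr[ib->length_dw++] = lower_32_bits(pe);     /* destination address, low half */
	ib->ptr[ib->length_dw++] = upper_32_bits(pe);     /* destination address, high half */
	ib->ptr[ib->length_dw++] = lower_32_bits(value);  /* PTE value, low half */
	ib->ptr[ib->length_dw++] = upper_32_bits(value);  /* PTE value, high half */
}

int main(void)
{
	struct demo_ib ib = { .length_dw = 0 };

	demo_emit_write_pte(&ib, 0x0000000812345678ULL, 0x80000000abcdef01ULL);
	for (unsigned int i = 0; i < ib.length_dw; i++)
		printf("dw%-2u 0x%08x\n", i, ib.ptr[i]);
	return 0;
}
```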
  vcn_v3_0.c
      1051  lower_32_bits(ring->gpu_addr));  in vcn_v3_0_start_dpg_mode()
      1062  lower_32_bits(ring->wptr));  in vcn_v3_0_start_dpg_mode()
      1228  lower_32_bits(ring->gpu_addr));  in vcn_v3_0_start()
      1238  lower_32_bits(ring->wptr));  in vcn_v3_0_start()
      1332  lower_32_bits(adev->vcn.inst[i].gpu_addr));  in vcn_v3_0_start_sriov()
      1349  lower_32_bits(cache_addr));  in vcn_v3_0_start_sriov()
      1364  lower_32_bits(cache_addr));  in vcn_v3_0_start_sriov()
      1381  lower_32_bits(rb_addr));  in vcn_v3_0_start_sriov()
      1395  lower_32_bits(rb_addr));  in vcn_v3_0_start_sriov()
      1706  lower_32_bits(ring->wptr));  in vcn_v3_0_dec_ring_set_wptr()
      [all …]

  sdma_v2_4.c
      318   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v2_4_ring_emit_fence()
      320   amdgpu_ring_write(ring, lower_32_bits(seq));  in sdma_v2_4_ring_emit_fence()
      326   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v2_4_ring_emit_fence()
      572   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in sdma_v2_4_ring_test_ring()
      627   ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v2_4_ring_test_ib()
      681   ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v2_4_vm_copy_pte()
      683   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v2_4_vm_copy_pte()
      710   ib->ptr[ib->length_dw++] = lower_32_bits(value);  in sdma_v2_4_vm_write_pte()
      1216  ib->ptr[ib->length_dw++] = lower_32_bits(src_offset);  in sdma_v2_4_emit_copy_buffer()
      1218  ib->ptr[ib->length_dw++] = lower_32_bits(dst_offset);  in sdma_v2_4_emit_copy_buffer()
      [all …]

  sdma_v3_0.c
      392   WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2));  in sdma_v3_0_ring_set_wptr()
      492   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v3_0_ring_emit_fence()
      494   amdgpu_ring_write(ring, lower_32_bits(seq));  in sdma_v3_0_ring_emit_fence()
      500   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v3_0_ring_emit_fence()
      721   lower_32_bits(wptr_gpu_addr));  in sdma_v3_0_gfx_resume()
      844   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in sdma_v3_0_ring_test_ring()
      899   ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v3_0_ring_test_ib()
      952   ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v3_0_vm_copy_pte()
      954   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v3_0_vm_copy_pte()
      977   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v3_0_vm_write_pte()
      [all …]

  sdma_v5_2.c
      293   lower_32_bits(ring->wptr << 2),  in sdma_v5_2_ring_set_wptr()
      306   lower_32_bits(ring->wptr << 2),  in sdma_v5_2_ring_set_wptr()
      310   lower_32_bits(ring->wptr << 2));  in sdma_v5_2_ring_set_wptr()
      441   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v5_2_ring_emit_fence()
      443   amdgpu_ring_write(ring, lower_32_bits(seq));  in sdma_v5_2_ring_emit_fence()
      452   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v5_2_ring_emit_fence()
      635   lower_32_bits(wptr_gpu_addr));  in sdma_v5_2_gfx_resume()
      992   ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v5_2_ring_test_ib()
      1049  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v5_2_vm_copy_pte()
      1051  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_2_vm_copy_pte()
      [all …]

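The various ring_set_wptr helpers above show another recurring use: the driver keeps a monotonically increasing 64-bit write pointer in dword units, but the doorbell or write-back slot only takes the low 32 bits of the byte offset, hence lower_32_bits(ring->wptr << 2). A sketch of that conversion under invented names, not the amdgpu API:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))

struct demo_ring {
	u64 wptr;                /* write pointer in dwords; never wraps in software */
};

static u32 demo_doorbell_value(const struct demo_ring *ring)
{
	/* << 2 converts dwords to bytes; the hardware register is 32 bits
	 * wide, so only the truncated low half is written out. */
	return lower_32_bits(ring->wptr << 2);
}

int main(void)
{
	struct demo_ring ring = { .wptr = 0x100000003ULL };  /* already past 4G dwords */

	printf("doorbell value: 0x%08x\n", demo_doorbell_value(&ring));
	return 0;
}
```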
  cik_sdma.c
      198   (lower_32_bits(ring->wptr) << 2) & 0x3fffc);  in cik_sdma_ring_set_wptr()
      286   amdgpu_ring_write(ring, lower_32_bits(addr));  in cik_sdma_ring_emit_fence()
      288   amdgpu_ring_write(ring, lower_32_bits(seq));  in cik_sdma_ring_emit_fence()
      294   amdgpu_ring_write(ring, lower_32_bits(addr));  in cik_sdma_ring_emit_fence()
      637   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));  in cik_sdma_ring_test_ring()
      692   ib.ptr[1] = lower_32_bits(gpu_addr);  in cik_sdma_ring_test_ib()
      742   ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cik_sdma_vm_copy_pte()
      744   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pte()
      767   ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_write_pte()
      771   ib->ptr[ib->length_dw++] = lower_32_bits(value);  in cik_sdma_vm_write_pte()
      [all …]

  vcn_v1_0.c
      309   lower_32_bits(adev->vcn.inst->gpu_addr));  in vcn_v1_0_mc_resume_spg_mode()
      917   lower_32_bits(ring->gpu_addr));  in vcn_v1_0_start_spg_mode()
      928   lower_32_bits(ring->wptr));  in vcn_v1_0_start_spg_mode()
      1075  lower_32_bits(ring->gpu_addr));  in vcn_v1_0_start_dpg_mode()
      1086  lower_32_bits(ring->wptr));  in vcn_v1_0_start_dpg_mode()
      1307  lower_32_bits(ring->gpu_addr));  in vcn_v1_0_pause_dpg_mode()
      1410  lower_32_bits(ring->wptr) | 0x80000000);  in vcn_v1_0_dec_ring_set_wptr()
      1554  data1 = lower_32_bits(pd_addr);  in vcn_v1_0_dec_ring_emit_vm_flush()
      1622  lower_32_bits(ring->wptr));  in vcn_v1_0_enc_ring_set_wptr()
      1625  lower_32_bits(ring->wptr));  in vcn_v1_0_enc_ring_set_wptr()
      [all …]

  sdma_v5_0.c
      406   lower_32_bits(ring->wptr << 2),  in sdma_v5_0_ring_set_wptr()
      419   lower_32_bits(ring->wptr << 2),  in sdma_v5_0_ring_set_wptr()
      423   lower_32_bits(ring->wptr << 2));  in sdma_v5_0_ring_set_wptr()
      556   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v5_0_ring_emit_fence()
      558   amdgpu_ring_write(ring, lower_32_bits(seq));  in sdma_v5_0_ring_emit_fence()
      567   amdgpu_ring_write(ring, lower_32_bits(addr));  in sdma_v5_0_ring_emit_fence()
      754   lower_32_bits(wptr_gpu_addr));  in sdma_v5_0_gfx_resume()
      785   lower_32_bits(ring->wptr) << 2);  in sdma_v5_0_gfx_resume()
      1071  ib.ptr[1] = lower_32_bits(gpu_addr);  in sdma_v5_0_ring_test_ib()
      1130  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_0_vm_copy_pte()
      [all …]

  vce_v4_0.c
      109   adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr);  in vce_v4_0_ring_set_wptr()
      110   WDOORBELL32(ring->doorbell_index, lower_32_bits(ring->wptr));  in vce_v4_0_ring_set_wptr()
      116   lower_32_bits(ring->wptr));  in vce_v4_0_ring_set_wptr()
      119   lower_32_bits(ring->wptr));  in vce_v4_0_ring_set_wptr()
      122   lower_32_bits(ring->wptr));  in vce_v4_0_ring_set_wptr()
      235   lower_32_bits(ring->gpu_addr));  in vce_v4_0_sriov_start()
      343   WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR), lower_32_bits(ring->wptr));  in vce_v4_0_start()
      344   WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_WPTR), lower_32_bits(ring->wptr));  in vce_v4_0_start()
      351   WREG32(SOC15_REG_OFFSET(VCE, 0, mmVCE_RB_RPTR2), lower_32_bits(ring->wptr));  in vce_v4_0_start()
      990   amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in vce_v4_0_ring_emit_ib()
      [all …]

  uvd_v7_0.c
      162   lower_32_bits(ring->wptr));  in uvd_v7_0_enc_ring_set_wptr()
      165   lower_32_bits(ring->wptr));  in uvd_v7_0_enc_ring_set_wptr()
      689   lower_32_bits(adev->uvd.inst[i].gpu_addr));  in uvd_v7_0_mc_resume()
      700   lower_32_bits(adev->uvd.inst[i].gpu_addr + offset));  in uvd_v7_0_mc_resume()
      831   lower_32_bits(adev->uvd.inst[i].gpu_addr));  in uvd_v7_0_sriov_start()
      1096  lower_32_bits(ring->gpu_addr));  in uvd_v7_0_start()
      1105  lower_32_bits(ring->wptr));  in uvd_v7_0_start()
      1327  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in uvd_v7_0_ring_emit_ib()
      1355  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr));  in uvd_v7_0_enc_ring_emit_ib()
      1405  data1 = lower_32_bits(pd_addr);  in uvd_v7_0_ring_emit_vm_flush()
      [all …]

/linux/drivers/pci/controller/mobiveil/

  pcie-mobiveil.c
      151   (lower_32_bits(size64) & WIN_SIZE_MASK);  in program_ib_windows()
      157   mobiveil_csr_writel(pcie, lower_32_bits(cpu_addr),  in program_ib_windows()
      162   mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),  in program_ib_windows()
      192   (lower_32_bits(size64) & WIN_SIZE_MASK);  in program_ob_windows()
      203   lower_32_bits(cpu_addr) & (~AXI_WINDOW_ALIGN_MASK),  in program_ob_windows()
      208   mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),  in program_ob_windows()

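program_ib_windows()/program_ob_windows() program an address-translation window: the 64-bit CPU address, PCI address, and window size each land in a lower/upper register pair. A sketch of that pattern; the register map, size mask, and csr_writel() stand-in below are invented for the example, not the Mobiveil layout:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))
#define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))

#define DEMO_WIN_SIZE_MASK 0xfffff000u   /* hypothetical: size field in bits 31:12 */

static u32 demo_csr[16];                 /* fake CSR space */

static void demo_csr_writel(u32 val, unsigned int reg)
{
	demo_csr[reg] = val;
	printf("csr[%2u] <= 0x%08x\n", reg, val);
}

static void demo_program_ob_window(u64 cpu_addr, u64 pci_addr, u64 size64)
{
	demo_csr_writel(lower_32_bits(size64) & DEMO_WIN_SIZE_MASK, 0); /* size, low  */
	demo_csr_writel(upper_32_bits(size64), 1);                      /* size, high */
	demo_csr_writel(lower_32_bits(cpu_addr), 2);                    /* AXI base, low  */
	demo_csr_writel(upper_32_bits(cpu_addr), 3);                    /* AXI base, high */
	demo_csr_writel(lower_32_bits(pci_addr), 4);                    /* PCI base, low  */
	demo_csr_writel(upper_32_bits(pci_addr), 5);                    /* PCI base, high */
}

int main(void)
{
	demo_program_ob_window(0x2000000000ULL, 0x80000000ULL, 0x40000000ULL);
	return 0;
}
```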
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/

  gm20b.c
      83    hdr.code_dma_base = lower_32_bits((addr + adjust) >> 8);  in gm20b_pmu_acr_bld_patch()
      86    hdr.data_dma_base = lower_32_bits((addr + adjust) >> 8);  in gm20b_pmu_acr_bld_patch()
      89    hdr.overlay_dma_base = lower_32_bits((addr + adjust) << 8);  in gm20b_pmu_acr_bld_patch()
      105   .code_dma_base = lower_32_bits(code),  in gm20b_pmu_acr_bld_write()
      109   .data_dma_base = lower_32_bits(data),  in gm20b_pmu_acr_bld_write()
      111   .overlay_dma_base = lower_32_bits(code),  in gm20b_pmu_acr_bld_write()

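The gm20b ACR bootloader-descriptor code stores DMA base fields in 256-byte units, so a wider-than-32-bit address still fits in a 32-bit field after ">> 8" before lower_32_bits() is applied. A sketch under that assumption; the struct is a stand-in, not the real descriptor layout:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))

struct demo_flcn_bl_desc {
	u32 code_dma_base;    /* code load address, in 256-byte blocks */
	u32 data_dma_base;    /* data load address, in 256-byte blocks */
};

int main(void)
{
	u64 addr = 0x00000020f0000000ULL;   /* example 64-bit VRAM address */
	u64 adjust = 0x100;                 /* example relocation offset */
	struct demo_flcn_bl_desc hdr;

	hdr.code_dma_base = lower_32_bits((addr + adjust) >> 8);
	hdr.data_dma_base = lower_32_bits((addr + adjust) >> 8);
	printf("code_dma_base=0x%08x data_dma_base=0x%08x\n",
	       hdr.code_dma_base, hdr.data_dma_base);
	return 0;
}
```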
/linux/arch/x86/include/asm/

  mshyperv.h
      58    u32 input_address_lo = lower_32_bits(input_address);  in hv_do_hypercall()
      60    u32 output_address_lo = lower_32_bits(output_address);  in hv_do_hypercall()
      93    u32 input1_lo = lower_32_bits(input1);  in hv_do_fast_hypercall8()
      126   u32 input1_lo = lower_32_bits(input1);  in hv_do_fast_hypercall16()
      128   u32 input2_lo = lower_32_bits(input2);  in hv_do_fast_hypercall16()

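In the 32-bit paths of hv_do_hypercall() and the fast-hypercall helpers, a 64-bit guest-physical address or immediate input is handed to the hypervisor as a high/low pair of 32-bit values. A sketch of just the split; the real code loads these halves into registers via inline asm, which is only represented by a printf here:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))
#define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))

static void demo_fast_hypercall8(u64 control, u64 input1)
{
	u32 input1_hi = upper_32_bits(input1);
	u32 input1_lo = lower_32_bits(input1);

	/* On a 32-bit build these halves would go into separate 32-bit
	 * registers; here we just show the split. */
	printf("control=0x%016llx input1 hi=0x%08x lo=0x%08x\n",
	       (unsigned long long)control, input1_hi, input1_lo);
}

int main(void)
{
	demo_fast_hypercall8(0x0001000000000008ULL, 0x123456789abcdef0ULL);
	return 0;
}
```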
/linux/drivers/pci/controller/

  pci-xgene.c
      292   val = (val32 & 0x0000ffff) | (lower_32_bits(mask) << 16);  in xgene_pcie_set_ib_mask()
      296   val = (val32 & 0xffff0000) | (lower_32_bits(mask) >> 16);  in xgene_pcie_set_ib_mask()
      389   xgene_pcie_writel(port, offset, lower_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
      391   xgene_pcie_writel(port, offset + 0x08, lower_32_bits(mask));  in xgene_pcie_setup_ob_reg()
      393   xgene_pcie_writel(port, offset + 0x10, lower_32_bits(pci_addr));  in xgene_pcie_setup_ob_reg()
      401   xgene_pcie_writel(port, CFGBARL, lower_32_bits(addr));  in xgene_pcie_setup_cfg_reg()
      450   xgene_pcie_writel(port, pim_reg, lower_32_bits(pim));  in xgene_pcie_setup_pims()
      453   xgene_pcie_writel(port, pim_reg + 0x10, lower_32_bits(size));  in xgene_pcie_setup_pims()
      517   xgene_pcie_writel(port, IR2MSK, lower_32_bits(mask));  in xgene_pcie_setup_ib_reg()
      523   xgene_pcie_writel(port, IR3MSKL, lower_32_bits(mask));  in xgene_pcie_setup_ib_reg()

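xgene_pcie_set_ib_mask() shows a less common variant: the low 32 bits of the mask straddle a 16-bit boundary between two registers, so lower_32_bits() is combined with "<< 16" and ">> 16". A minimal sketch of that split; the initial register contents are made up:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))

int main(void)
{
	u64 mask = 0xfffffffff0000000ULL;   /* example inbound mask */
	u32 reg0 = 0x0000abcd;              /* pretend current register contents */
	u32 reg1 = 0x1234abcd;

	/* Keep the low 16 bits of reg0, place mask[15:0] in its high 16 bits. */
	reg0 = (reg0 & 0x0000ffff) | (lower_32_bits(mask) << 16);
	/* Keep the high 16 bits of reg1, place mask[31:16] in its low 16 bits. */
	reg1 = (reg1 & 0xffff0000) | (lower_32_bits(mask) >> 16);

	printf("reg0=0x%08x reg1=0x%08x\n", reg0, reg1);
	return 0;
}
```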
/linux/drivers/media/pci/pt3/

  pt3_dma.c
      52    iowrite32(lower_32_bits(adap->desc_buf[0].b_addr),  in pt3_start_dma()
      184   d->next_l = lower_32_bits(desc_addr);  in pt3_alloc_dmabuf()
      190   d->addr_l = lower_32_bits(data_addr);  in pt3_alloc_dmabuf()
      195   d->next_l = lower_32_bits(desc_addr);  in pt3_alloc_dmabuf()
      204   d->next_l = lower_32_bits(desc_addr);  in pt3_alloc_dmabuf()

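pt3_alloc_dmabuf() builds a chain of DMA descriptors in which each entry stores its data-buffer bus address and the bus address of the next descriptor as 32-bit low/high halves. A self-contained sketch of that chaining; field names loosely follow the listing, while sizes and the loop-back convention are illustrative:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))
#define upper_32_bits(n) ((u32)(((n) >> 16) >> 16))

struct demo_xfer_desc {
	u32 addr_l, addr_h;   /* data buffer bus address */
	u32 size;
	u32 next_l, next_h;   /* bus address of the next descriptor */
};

static void demo_fill_desc(struct demo_xfer_desc *d, u64 data_addr,
			   u32 size, u64 next_desc_addr)
{
	d->addr_l = lower_32_bits(data_addr);
	d->addr_h = upper_32_bits(data_addr);
	d->size   = size;
	d->next_l = lower_32_bits(next_desc_addr);
	d->next_h = upper_32_bits(next_desc_addr);
}

int main(void)
{
	struct demo_xfer_desc d[2];
	u64 desc_bus[2] = { 0x1f0000000ULL, 0x1f0000020ULL };     /* fake bus addresses */

	demo_fill_desc(&d[0], 0x1e0000000ULL, 4096, desc_bus[1]);
	demo_fill_desc(&d[1], 0x1e0001000ULL, 4096, desc_bus[0]); /* loop back to the head */

	printf("desc0: addr %08x:%08x next %08x:%08x\n",
	       d[0].addr_h, d[0].addr_l, d[0].next_h, d[0].next_l);
	return 0;
}
```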
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/

  gm20b.c
      41    hdr.code_dma_base = lower_32_bits((addr + adjust) >> 8);  in gm20b_gr_acr_bld_patch()
      44    hdr.data_dma_base = lower_32_bits((addr + adjust) >> 8);  in gm20b_gr_acr_bld_patch()
      60    .code_dma_base = lower_32_bits(code),  in gm20b_gr_acr_bld_write()
      64    .data_dma_base = lower_32_bits(data),  in gm20b_gr_acr_bld_write()

/linux/drivers/dma/ptdma/

  ptdma-dev.c
      89    tail = lower_32_bits(cmd_q->qdma_tail + cmd_q->qidx * Q_DESC_SIZE);  in pt_core_execute_cmd()
      109   desc.src_lo = lower_32_bits(pt_engine->src_dma);  in pt_core_perform_passthru()
      111   desc.dst_lo = lower_32_bits(pt_engine->dst_dma);  in pt_core_perform_passthru()
      139   tail = lower_32_bits(cmd_q->qdma_tail + cmd_q->qidx * Q_DESC_SIZE);  in pt_do_cmd_complete()
      240   dma_addr_lo = lower_32_bits(cmd_q->qdma_tail);  in pt_core_init()

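pt_core_execute_cmd() computes the new queue tail as the low 32 bits of the current descriptor's DMA address (queue base plus index times descriptor size), since the tail register only holds the low half. A sketch of that computation; the descriptor size and field names here are assumptions for the example:

```c
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

#define lower_32_bits(n) ((u32)((n) & 0xffffffff))

#define DEMO_Q_DESC_SIZE 32u        /* hypothetical descriptor size in bytes */

struct demo_cmd_queue {
	u64 qdma_tail;              /* DMA address of the queue base */
	unsigned int qidx;          /* index of the next free descriptor */
};

int main(void)
{
	struct demo_cmd_queue cmd_q = { .qdma_tail = 0x00000001f8000000ULL, .qidx = 5 };
	u32 tail;

	/* Only the low 32 bits of the descriptor address go into the
	 * (32-bit) tail register. */
	tail = lower_32_bits(cmd_q.qdma_tail + cmd_q.qidx * DEMO_Q_DESC_SIZE);
	printf("tail register value: 0x%08x\n", tail);
	return 0;
}
```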