/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_virt.c
    119  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_request_full_gpu() local
    142  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_release_full_gpu() local
    163  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_reset_gpu() local
    166  if (virt->ops && virt->ops->reset_gpu) {  in amdgpu_virt_reset_gpu()
    179  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_request_init_data() local
    198  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_wait_reset() local
    275  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_init_ras_err_handler_data() local
    310  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_ras_release_bp() local
    328  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_release_ras_err_handler_data() local
    347  struct amdgpu_virt *virt = &adev->virt;  in amdgpu_virt_ras_add_bps() local
    [all …]
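The hit at line 166 shows the usual ops-table dispatch: the SR-IOV backend registers a set of callbacks and callers test the pointer before invoking it. A minimal sketch of that pattern follows; the struct, field, and function names are illustrative, not the actual amdgpu definitions.

/* Hypothetical sketch of the ops-table dispatch seen around line 166;
 * the real amdgpu_virt_ops has more hooks and different signatures. */
#include <linux/errno.h>

struct vf_ops {
	int (*reset_gpu)(void *ctx);	/* may be NULL if the host lacks support */
};

struct vf_state {
	const struct vf_ops *ops;
};

static int vf_reset_gpu(struct vf_state *virt, void *ctx)
{
	/* Dispatch only when the backend registered a handler. */
	if (virt->ops && virt->ops->reset_gpu)
		return virt->ops->reset_gpu(ctx);

	return -EOPNOTSUPP;	/* no virtualization backend bound */
}

Keeping the NULL check at every call site lets the same code run bare-metal, where no ops table is installed.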
amdgpu_vf_error.c
    41  mutex_lock(&adev->virt.vf_errors.lock);  in amdgpu_vf_error_put()
    46  adev->virt.vf_errors.write_count ++;  in amdgpu_vf_error_put()
    47  mutex_unlock(&adev->virt.vf_errors.lock);  in amdgpu_vf_error_put()
    58  (!adev->virt.ops) || (!adev->virt.ops->trans_msg)) {  in amdgpu_vf_error_trans_all()
    69  mutex_lock(&adev->virt.vf_errors.lock);  in amdgpu_vf_error_trans_all()
    71  …if (adev->virt.vf_errors.write_count - adev->virt.vf_errors.read_count > AMDGPU_VF_ERROR_ENTRY_SIZ…  in amdgpu_vf_error_trans_all()
    72  adev->virt.vf_errors.read_count = adev->virt.vf_errors.write_count - AMDGPU_VF_ERROR_ENTRY_SIZE;  in amdgpu_vf_error_trans_all()
    75  while (adev->virt.vf_errors.read_count < adev->virt.vf_errors.write_count) {  in amdgpu_vf_error_trans_all()
    78  adev->virt.vf_errors.flags[index]);  in amdgpu_vf_error_trans_all()
    83  adev->virt.vf_errors.read_count ++;  in amdgpu_vf_error_trans_all()
    [all …]
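These hits outline a mutex-protected error ring indexed by free-running write_count/read_count counters; the clamp at lines 71–72 simply skips entries the producer has already overwritten. A hedged sketch of that counter discipline, with made-up field names and capacity:

/* Illustrative sketch of the write_count/read_count ring discipline visible
 * in amdgpu_vf_error_trans_all(); not the driver's actual structures.
 * r->lock must have been mutex_init()'d by the owner. */
#include <linux/types.h>
#include <linux/mutex.h>

#define ERR_RING_SIZE 16	/* assumed capacity, power of two */

struct err_ring {
	struct mutex lock;
	u32 write_count;	/* free-running producer counter */
	u32 read_count;		/* free-running consumer counter */
	u16 code[ERR_RING_SIZE];
};

static void err_ring_put(struct err_ring *r, u16 code)
{
	mutex_lock(&r->lock);
	r->code[r->write_count % ERR_RING_SIZE] = code;
	r->write_count++;
	mutex_unlock(&r->lock);
}

static void err_ring_drain(struct err_ring *r, void (*emit)(u16 code))
{
	mutex_lock(&r->lock);
	/* If the producer lapped the consumer, drop the overwritten entries. */
	if (r->write_count - r->read_count > ERR_RING_SIZE)
		r->read_count = r->write_count - ERR_RING_SIZE;
	while (r->read_count < r->write_count) {
		emit(r->code[r->read_count % ERR_RING_SIZE]);
		r->read_count++;
	}
	mutex_unlock(&r->lock);
}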
mxgpu_nv.c
    186  adev->virt.req_init_data_ver = 0;  in xgpu_nv_send_access_requests()
    190  adev->virt.req_init_data_ver =  in xgpu_nv_send_access_requests()
    194  if (adev->virt.req_init_data_ver < 1)  in xgpu_nv_send_access_requests()
    195  adev->virt.req_init_data_ver = 1;  in xgpu_nv_send_access_requests()
    201  adev->virt.fw_reserve.checksum_key =  in xgpu_nv_send_access_requests()
    277  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  in xgpu_nv_mailbox_flr_work()
    340  schedule_work(&adev->virt.flr_work);  in xgpu_nv_mailbox_rcv_irq()
    368  adev->virt.ack_irq.num_types = 1;  in xgpu_nv_mailbox_set_irq_funcs()
    370  adev->virt.rcv_irq.num_types = 1;  in xgpu_nv_mailbox_set_irq_funcs()
    411  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_nv_mailbox_put_irq()
    [all …]
mxgpu_ai.c
    179  adev->virt.fw_reserve.checksum_key =  in xgpu_ai_send_access_requests()
    248  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  in xgpu_ai_mailbox_flr_work()
    305  schedule_work(&adev->virt.flr_work);  in xgpu_ai_mailbox_rcv_irq()
    336  adev->virt.ack_irq.num_types = 1;  in xgpu_ai_mailbox_set_irq_funcs()
    338  adev->virt.rcv_irq.num_types = 1;  in xgpu_ai_mailbox_set_irq_funcs()
    352  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_add_irq_id()
    363  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_get_irq()
    366  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_get_irq()
    368  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_ai_mailbox_get_irq()
    379  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_ai_mailbox_put_irq()
    [all …]
amdgpu_virt.h
    248  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_ENABLE_IOV)
    251  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_IS_VF)
    254  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)
    257  ((adev)->virt.caps & AMDGPU_SRIOV_CAPS_RUNTIME)
    268  ((adev)->virt.reg_access & (AMDGIM_FEATURE_IH_REG_PSP_EN)))
    272  ((adev)->virt.reg_access & (AMDGIM_FEATURE_MMHUB_REG_RLC_EN)))
    276  ((adev)->virt.reg_access & (AMDGIM_FEATURE_GC_REG_RLC_EN)))
    279  ((adev)->virt.caps & AMDGPU_PASSTHROUGH_MODE)
    291  ((adev)->virt.gim_feature & AMDGIM_FEATURE_PP_ONE_VF)
    293  ((!amdgpu_in_reset(adev)) && adev->virt.tdr_debug)
    [all …]
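The amdgpu_virt.h hits are feature-test macros over capability bitfields. A small sketch of that pattern is shown below; the bit names, field names, and macro names are invented for illustration and are not the amdgpu definitions.

/* Capability-bit test pattern in the style of the amdgpu_sriov_*() macros. */
#include <linux/types.h>
#include <linux/bits.h>

#define CAPS_ENABLE_IOV	BIT(0)
#define CAPS_IS_VF	BIT(1)
#define CAPS_RUNTIME	BIT(2)

struct dev_virt {
	u32 caps;
};

struct my_device {
	struct dev_virt virt;
};

#define dev_is_vf(d)		((d)->virt.caps & CAPS_IS_VF)
#define dev_runtime_ok(d)	((d)->virt.caps & CAPS_RUNTIME)

static bool can_use_vf_path(struct my_device *dev)
{
	/* Both bits must be set before taking the VF-only code path. */
	return dev_is_vf(dev) && dev_runtime_ok(dev);
}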
mxgpu_vi.c
    514  struct amdgpu_device *adev = container_of(virt, struct amdgpu_device, virt);  in xgpu_vi_mailbox_flr_work()
    554  schedule_work(&adev->virt.flr_work);  in xgpu_vi_mailbox_rcv_irq()
    572  adev->virt.ack_irq.num_types = 1;  in xgpu_vi_mailbox_set_irq_funcs()
    573  adev->virt.ack_irq.funcs = &xgpu_vi_mailbox_ack_irq_funcs;  in xgpu_vi_mailbox_set_irq_funcs()
    574  adev->virt.rcv_irq.num_types = 1;  in xgpu_vi_mailbox_set_irq_funcs()
    588  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_vi_mailbox_add_irq_id()
    599  r = amdgpu_irq_get(adev, &adev->virt.rcv_irq, 0);  in xgpu_vi_mailbox_get_irq()
    602  r = amdgpu_irq_get(adev, &adev->virt.ack_irq, 0);  in xgpu_vi_mailbox_get_irq()
    604  amdgpu_irq_put(adev, &adev->virt.rcv_irq, 0);  in xgpu_vi_mailbox_get_irq()
    615  amdgpu_irq_put(adev, &adev->virt.ack_irq, 0);  in xgpu_vi_mailbox_put_irq()
    [all …]
/linux/drivers/staging/media/atomisp/pci/hmm/

hmm.c
    291  WARN_ON(!virt);  in hmm_free()
    298  (unsigned int)virt);  in hmm_free()
    415  if (!virt) {  in hmm_load()
    442  if (!virt) {  in hmm_store()
    506  virt += len;  in hmm_store()
    575  virt += len;  in hmm_set()
    597  virt);  in hmm_virt_to_phys()
    615  virt);  in hmm_mmap()
    632  virt);  in hmm_vmap()
    652  virt);  in hmm_flush_vmap()
    [all …]
/linux/arch/m68k/mm/

sun3kmap.c
    39  sun3_put_pte(virt, pte);  in do_page_mapin()
    43  print_pte_vaddr(virt);  in do_page_mapin()
    52  if(sun3_get_segmap(virt & ~SUN3_PMEG_MASK) == SUN3_INVALID_PMEG)  in do_pmeg_mapin()
    53  mmu_emu_map_pmeg(sun3_get_context(), virt);  in do_pmeg_mapin()
    56  do_page_mapin(phys, virt, type);  in do_pmeg_mapin()
    58  virt += PAGE_SIZE;  in do_pmeg_mapin()
    67  unsigned long offset, virt, ret;  in sun3_ioremap() local
    88  virt = (unsigned long)area->addr;  in sun3_ioremap()
    89  ret = virt + offset;  in sun3_ioremap()
    98  do_pmeg_mapin(phys, virt, type, seg_pages);  in sun3_ioremap()
    [all …]
/linux/drivers/gpio/

gpio-stp-xway.c
    81   void __iomem *virt;  member
    169  xway_stp_w32(chip->virt, 0, XWAY_STP_AR);  in xway_stp_hw_init()
    170  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU0);  in xway_stp_hw_init()
    171  xway_stp_w32(chip->virt, 0, XWAY_STP_CPU1);  in xway_stp_hw_init()
    184  xway_stp_w32_mask(chip->virt,  in xway_stp_hw_init()
    190  xway_stp_w32_mask(chip->virt,  in xway_stp_hw_init()
    194  xway_stp_w32_mask(chip->virt,  in xway_stp_hw_init()
    201  xway_stp_w32_mask(chip->virt,  in xway_stp_hw_init()
    208  xway_stp_w32_mask(chip->virt,  in xway_stp_hw_init()
    244  if (IS_ERR(chip->virt))  in xway_stp_probe()
    [all …]
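Here `chip->virt` is an __iomem register base and `xway_stp_w32_mask()` is a clear/set helper over it. A generic read-modify-write sketch in that spirit follows; the helper below is illustrative and does not reproduce the driver's actual accessor or argument order.

/* Generic read-modify-write over an ioremap()'d register window. */
#include <linux/io.h>

static void mmio_update_bits(void __iomem *base, u32 reg, u32 clear, u32 set)
{
	u32 val = readl(base + reg);

	val &= ~clear;		/* drop the bits being reprogrammed */
	val |= set;		/* install the new value */
	writel(val, base + reg);
}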
/linux/drivers/scsi/lpfc/

lpfc_mem.c
    109  if (!pool->elements[i].virt)  in lpfc_mem_alloc()
    297  lpfc_mbuf_free(phba, mp->virt, mp->phys);  in lpfc_mem_free_all()
    307  lpfc_mbuf_free(phba, mp->virt, mp->phys);  in lpfc_mem_free_all()
    415  pool->elements[pool->current_count].virt = virt;  in __lpfc_mbuf_free()
    443  __lpfc_mbuf_free(phba, virt, dma);  in lpfc_mbuf_free()
    510  if (!hbqbp->dbuf.virt) {  in lpfc_els_hbq_alloc()
    562  if (!dma_buf->hbuf.virt) {  in lpfc_sli4_rb_alloc()
    568  if (!dma_buf->dbuf.virt) {  in lpfc_sli4_rb_alloc()
    620  if (!dma_buf->hbuf.virt) {  in lpfc_sli4_nvmet_alloc()
    626  if (!dma_buf->dbuf.virt) {  in lpfc_sli4_nvmet_alloc()
    [all …]
/linux/include/asm-generic/

sections.h
    78   static inline bool memory_contains(void *begin, void *end, void *virt,  in memory_contains() argument
    81   return virt >= begin && virt + size <= end;  in memory_contains()
    95   static inline bool memory_intersects(void *begin, void *end, void *virt,  in memory_intersects() argument
    98   void *vend = virt + size;  in memory_intersects()
    100  return (virt >= begin && virt < end) || (vend >= begin && vend < end);  in memory_intersects()
    112  static inline bool init_section_contains(void *virt, size_t size)  in init_section_contains() argument
    114  return memory_contains(__init_begin, __init_end, virt, size);  in init_section_contains()
    126  static inline bool init_section_intersects(void *virt, size_t size)  in init_section_intersects() argument
    128  return memory_intersects(__init_begin, __init_end, virt, size);  in init_section_intersects()
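These hits show the full bodies of memory_contains()/memory_intersects() and their __init-section wrappers. A short caller sketch follows; the checked object is hypothetical and exists only to show the range test from line 114.

/* Example caller of init_section_contains(); `probe_buf` is a made-up object. */
#include <linux/types.h>
#include <asm/sections.h>	/* pulls in the asm-generic helpers on most arches */

static char probe_buf[64];

static bool probe_buf_is_init_data(void)
{
	/* True only if the whole object lies inside [__init_begin, __init_end). */
	return init_section_contains(probe_buf, sizeof(probe_buf));
}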
/linux/drivers/net/ethernet/qlogic/qed/

qed_chain.c
    73   void *virt, *virt_next;  in qed_chain_free_next_ptr() local
    77   virt = chain->p_virt_addr;  in qed_chain_free_next_ptr()
    81   if (!virt)  in qed_chain_free_next_ptr()
    84   next = virt + size;  in qed_chain_free_next_ptr()
    90   virt = virt_next;  in qed_chain_free_next_ptr()
    209  if (!virt)  in qed_chain_alloc_next_ptr()
    220  virt_prev = virt;  in qed_chain_alloc_next_ptr()
    236  void *virt;  in qed_chain_alloc_single() local
    240  if (!virt)  in qed_chain_alloc_single()
    257  void *virt;  in qed_chain_alloc_pbl() local
    [all …]
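qed_chain_free_next_ptr() walks a "next-ptr" chain: each DMA page carries, at its tail, the virtual and bus address of the following page (line 84), which must be read before the current page is freed. A hedged sketch of that walk is below; the link-record layout and struct names are assumptions, not the qed definitions.

/* Freeing a next-ptr chain of coherent DMA pages, link record assumed at the
 * tail of each page. */
#include <linux/types.h>
#include <linux/dma-mapping.h>

struct page_link {
	void *next_virt;	/* CPU address of the following page */
	dma_addr_t next_phys;	/* bus address of the following page */
};

static void free_next_ptr_chain(struct device *dev, void *virt,
				dma_addr_t phys, size_t page_size, u32 pages)
{
	while (pages-- && virt) {
		struct page_link *link = virt + page_size - sizeof(*link);
		void *next_virt = link->next_virt;
		dma_addr_t next_phys = link->next_phys;

		/* Read the link before the page it lives in is returned. */
		dma_free_coherent(dev, page_size, virt, phys);
		virt = next_virt;
		phys = next_phys;
	}
}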
/linux/drivers/staging/media/atomisp/include/hmm/

hmm.h
    45  int hmm_load(ia_css_ptr virt, void *data, unsigned int bytes);
    46  int hmm_store(ia_css_ptr virt, const void *data, unsigned int bytes);
    47  int hmm_set(ia_css_ptr virt, int c, unsigned int bytes);
    48  int hmm_flush(ia_css_ptr virt, unsigned int bytes);
    53  phys_addr_t hmm_virt_to_phys(ia_css_ptr virt);
    62  void *hmm_vmap(ia_css_ptr virt, bool cached);
    63  void hmm_vunmap(ia_css_ptr virt);
    69  void hmm_flush_vmap(ia_css_ptr virt);
    90  int hmm_mmap(struct vm_area_struct *vma, ia_css_ptr virt);
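These declarations are the small host-memory-management surface of the atomisp ISP: hmm_store()/hmm_load() copy between kernel memory and an ia_css_ptr device address. A usage sketch follows; it is only a round-trip under the listed prototypes — the buffer handle is assumed to have been allocated elsewhere (allocation is not part of the listing), and the include path is driver-local.

/* Illustrative round-trip through the listed hmm accessors. */
#include <linux/errno.h>
#include <linux/string.h>
#include "hmm/hmm.h"		/* assumed driver-local include path */

static int hmm_roundtrip(ia_css_ptr buf)
{
	char out[32] = "parameter block";
	char in[32];
	int ret;

	ret = hmm_store(buf, out, sizeof(out));	/* kernel -> ISP memory */
	if (ret)
		return ret;

	ret = hmm_load(buf, in, sizeof(in));	/* ISP memory -> kernel */
	if (ret)
		return ret;

	return memcmp(in, out, sizeof(out)) ? -EIO : 0;
}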
/linux/drivers/mtd/maps/

nettel.c
    221  if (!nettel_amd_map.virt) {  in nettel_init()
    280  iounmap(nettel_amd_map.virt);  in nettel_init()
    281  nettel_amd_map.virt = NULL;  in nettel_init()
    307  if (!nettel_intel_map.virt) {  in nettel_init()
    341  if (!nettel_intel_map.virt) {  in nettel_init()
    417  iounmap(nettel_amd_map.virt);  in nettel_init()
    437  if (nettel_amd_map.virt) {  in nettel_cleanup()
    438  iounmap(nettel_amd_map.virt);  in nettel_cleanup()
    439  nettel_amd_map.virt = NULL;  in nettel_cleanup()
    446  if (nettel_intel_map.virt) {  in nettel_cleanup()
    [all …]
dc21285.c
    57   val.x[0] = *(uint8_t*)(map->virt + ofs);  in dc21285_read8()
    64   val.x[0] = *(uint16_t*)(map->virt + ofs);  in dc21285_read16()
    71   val.x[0] = *(uint32_t*)(map->virt + ofs);  in dc21285_read32()
    77   memcpy(to, (void*)(map->virt + from), len);  in dc21285_copy_from()
    86   *(uint8_t*)(map->virt + adr) = d.x[0];  in dc21285_write8()
    95   *(uint16_t*)(map->virt + adr) = d.x[0];  in dc21285_write16()
    102  *(uint32_t*)(map->virt + adr) = d.x[0];  in dc21285_write32()
    179  dc21285_map.virt = ioremap(DC21285_FLASH, 16*1024*1024);  in init_dc21285()
    180  if (!dc21285_map.virt) {  in init_dc21285()
    192  iounmap(dc21285_map.virt);  in init_dc21285()
    [all …]
ts5500_flash.c
    59  ts5500_map.virt = ioremap(ts5500_map.phys, ts5500_map.size);  in init_ts5500_map()
    61  if (!ts5500_map.virt) {  in init_ts5500_map()
    84  iounmap(ts5500_map.virt);  in init_ts5500_map()
    96  if (ts5500_map.virt) {  in cleanup_ts5500_map()
    97  iounmap(ts5500_map.virt);  in cleanup_ts5500_map()
    98  ts5500_map.virt = NULL;  in cleanup_ts5500_map()
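nettel.c, dc21285.c, and ts5500_flash.c all follow the same map lifecycle: ioremap() the flash window into the map's virt pointer, fail the init if the mapping could not be made, and iounmap() plus NULL the pointer on cleanup so a second cleanup is harmless. A condensed sketch of that lifecycle, with placeholder physical base and size rather than real hardware addresses:

/* Minimal ioremap()/iounmap() lifecycle in the style of init_ts5500_map(). */
#include <linux/io.h>
#include <linux/errno.h>

static void __iomem *flash_virt;

static int flash_map_init(phys_addr_t phys, size_t size)
{
	flash_virt = ioremap(phys, size);
	if (!flash_virt)
		return -ENOMEM;		/* window could not be mapped */
	return 0;
}

static void flash_map_exit(void)
{
	if (flash_virt) {
		iounmap(flash_virt);
		flash_virt = NULL;	/* guard against double unmap */
	}
}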
/linux/drivers/media/platform/s5p-mfc/

s5p_mfc_opr.c
    54   b->virt = dev->mem_virt + offset;  in s5p_mfc_alloc_priv_buf()
    61   b->virt = dma_alloc_coherent(mem_dev, b->size, &b->dma, GFP_KERNEL);  in s5p_mfc_alloc_priv_buf()
    62   if (!b->virt)  in s5p_mfc_alloc_priv_buf()
    67   dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  in s5p_mfc_alloc_priv_buf()
    72   mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  in s5p_mfc_alloc_priv_buf()
    88   if (!b->virt)  in s5p_mfc_alloc_generic_buf()
    91   mfc_debug(3, "Allocated addr %p %pad\n", b->virt, &b->dma);  in s5p_mfc_alloc_generic_buf()
    109  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  in s5p_mfc_release_priv_buf()
    111  b->virt = NULL;  in s5p_mfc_release_priv_buf()
    120  dma_free_coherent(mem_dev, b->size, b->virt, b->dma);  in s5p_mfc_release_generic_buf()
    [all …]
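The MFC buffer helpers pair dma_alloc_coherent() with dma_free_coherent() and record both the CPU pointer (b->virt) and the bus address (b->dma). A hedged sketch of that bookkeeping; the struct below is illustrative, not the driver's s5p_mfc_priv_buf type.

/* Coherent-buffer bookkeeping in the style of s5p_mfc_alloc_priv_buf(). */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

struct priv_buf {
	size_t size;
	void *virt;		/* CPU-visible address */
	dma_addr_t dma;		/* device/bus address */
};

static int priv_buf_alloc(struct device *dev, struct priv_buf *b, size_t size)
{
	b->size = size;
	b->virt = dma_alloc_coherent(dev, size, &b->dma, GFP_KERNEL);
	if (!b->virt)
		return -ENOMEM;
	return 0;
}

static void priv_buf_free(struct device *dev, struct priv_buf *b)
{
	if (!b->virt)
		return;
	dma_free_coherent(dev, b->size, b->virt, b->dma);
	b->virt = NULL;		/* mark the buffer as released */
}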
/linux/drivers/net/ipa/

gsi.c
    315   iowrite32(0, gsi->virt + GSI_CNTXT_GSI_IRQ_EN_OFFSET);  in gsi_irq_disable()
    349   iowrite32(val, gsi->virt + reg);  in gsi_command()
    469   void __iomem *virt = channel->gsi->virt;  in gsi_channel_state() local
    1151  val = ioread32(gsi->virt + GSI_ERROR_LOG_OFFSET);  in gsi_isr_glob_err()
    1152  iowrite32(0, gsi->virt + GSI_ERROR_LOG_OFFSET);  in gsi_isr_glob_err()
    1153  iowrite32(~0, gsi->virt + GSI_ERROR_LOG_CLR_OFFSET);  in gsi_isr_glob_err()
    1422  if (!ring->virt)  in gsi_ring_alloc()
    1815  iowrite32(1, gsi->virt + GSI_CNTXT_INTSET_OFFSET);  in gsi_irq_setup()
    1903  val = ioread32(gsi->virt + GSI_GSI_STATUS_OFFSET);  in gsi_setup()
    1918  iowrite32(0, gsi->virt + GSI_ERROR_LOG_OFFSET);  in gsi_setup()
    [all …]
/linux/Documentation/devicetree/bindings/interconnect/

qcom,rpmh.yaml
    34  - qcom,sc7180-ipa-virt
    35  - qcom,sc7180-mc-virt
    38  - qcom,sc7180-qup-virt
    42  - qcom,sc7280-clk-virt
    48  - qcom,sc7280-mc-virt
    60  - qcom,sc8180x-mc-virt
    71  - qcom,sdx55-ipa-virt
    72  - qcom,sdx55-mc-virt
    83  - qcom,sm8150-mc-virt
    93  - qcom,sm8250-mc-virt
    [all …]
/linux/arch/powerpc/mm/nohash/

fsl_book3e.c
    117  TLBCAM[index].MAS2 = virt & PAGE_MASK;  in settlbcam()
    139  tlbcam_addrs[index].start = virt;  in settlbcam()
    140  tlbcam_addrs[index].limit = virt + size - 1;  in settlbcam()
    148  unsigned int align = __ffs(virt | phys);  in calc_cam_sz()
    187  cam_sz = calc_cam_sz(boundary, virt, phys);  in map_mem_in_cams_addr()
    189  settlbcam(i, virt, phys, cam_sz, pgprot_val(prot), 0);  in map_mem_in_cams_addr()
    193  virt += cam_sz;  in map_mem_in_cams_addr()
    200  cam_sz = calc_cam_sz(ram, virt, phys);  in map_mem_in_cams_addr()
    202  settlbcam(i, virt, phys, cam_sz, pgprot_val(prot), 0);  in map_mem_in_cams_addr()
    206  virt += cam_sz;  in map_mem_in_cams_addr()
    [all …]
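The `__ffs(virt | phys)` at line 148 derives the largest naturally aligned mapping size both addresses allow: the lowest set bit of (virt | phys) is the strictest alignment the two share. A standalone illustration of just that step (the real calc_cam_sz() additionally clamps to the remaining RAM, the CAM boundary, and the TLB entry sizes the hardware supports):

/* Largest naturally aligned size usable for mapping virt -> phys.
 * Assumes (virt | phys) != 0, as in the real caller. */
#include <linux/bitops.h>

static unsigned long max_aligned_map(unsigned long virt, unsigned long phys,
				     unsigned long remaining)
{
	unsigned int align = __ffs(virt | phys);	/* log2 of shared alignment */
	unsigned long size = 1UL << align;

	return size < remaining ? size : remaining;
}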
/linux/arch/parisc/kernel/

kexec.c
    79  void *virt = (void *)__fix_to_virt(FIX_TEXT_KEXEC);  in machine_kexec() local
    88  desc.addr = (long long)virt;  in machine_kexec()
    90  reloc = (void *)virt;  in machine_kexec()
    93  memcpy(virt, dereference_function_descriptor(relocate_new_kernel),  in machine_kexec()
    96  *(unsigned long *)(virt + kexec_cmdline_offset) = arch->cmdline;  in machine_kexec()
    97  *(unsigned long *)(virt + kexec_initrd_start_offset) = arch->initrd_start;  in machine_kexec()
    98  *(unsigned long *)(virt + kexec_initrd_end_offset) = arch->initrd_end;  in machine_kexec()
    99  *(unsigned long *)(virt + kexec_free_mem_offset) = PAGE0->mem_free;  in machine_kexec()
/linux/arch/arm/mach-hisi/

platsmp.c
    105  void __iomem *virt;  in hix5hd2_set_scu_boot_addr() local
    107  virt = ioremap(start_addr, PAGE_SIZE);  in hix5hd2_set_scu_boot_addr()
    109  writel_relaxed(0xe51ff004, virt); /* ldr pc, [pc, #-4] */  in hix5hd2_set_scu_boot_addr()
    110  writel_relaxed(jump_addr, virt + 4); /* pc jump phy address */  in hix5hd2_set_scu_boot_addr()
    111  iounmap(virt);  in hix5hd2_set_scu_boot_addr()
    141  void __iomem *virt;  in hip01_set_boot_addr() local
    143  virt = phys_to_virt(start_addr);  in hip01_set_boot_addr()
    145  writel_relaxed(0xe51ff004, virt);  in hip01_set_boot_addr()
    146  writel_relaxed(jump_addr, virt + 4);  in hip01_set_boot_addr()
/linux/crypto/

cfb.c
    49   u8 *src = walk->src.virt.addr;  in crypto_cfb_final()
    50   u8 *dst = walk->dst.virt.addr;  in crypto_cfb_final()
    63   u8 *src = walk->src.virt.addr;  in crypto_cfb_encrypt_segment()
    64   u8 *dst = walk->dst.virt.addr;  in crypto_cfb_encrypt_segment()
    86   u8 *src = walk->src.virt.addr;  in crypto_cfb_encrypt_inplace()
    113  if (walk.src.virt.addr == walk.dst.virt.addr)  in crypto_cfb_encrypt()
    133  u8 *src = walk->src.virt.addr;  in crypto_cfb_decrypt_segment()
    134  u8 *dst = walk->dst.virt.addr;  in crypto_cfb_decrypt_segment()
    156  u8 *src = walk->src.virt.addr;  in crypto_cfb_decrypt_inplace()
    173  if (walk->src.virt.addr == walk->dst.virt.addr)  in crypto_cfb_decrypt_blocks()
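The cfb.c hits all read plaintext and write ciphertext through walk->src.virt.addr / walk->dst.virt.addr. The sketch below shows the generic skcipher walk loop that produces those pointers; the per-block transform is a placeholder XOR rather than real CFB, data is assumed to be a whole number of blocks, and the trailing partial block that crypto_cfb_final() handles is ignored.

/* Generic skcipher walk loop over the virtually mapped segments. */
#include <crypto/internal/skcipher.h>
#include <linux/string.h>

static int demo_walk(struct skcipher_request *req, const u8 *pad,
		     unsigned int bsize)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		u8 *src = walk.src.virt.addr;	/* mapped source segment */
		u8 *dst = walk.dst.virt.addr;	/* mapped destination segment */
		unsigned int nbytes = walk.nbytes;

		do {
			if (dst != src)
				memcpy(dst, src, bsize);
			crypto_xor(dst, pad, bsize);	/* placeholder transform */
			src += bsize;
			dst += bsize;
		} while ((nbytes -= bsize) >= bsize);

		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}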
/linux/arch/arm/mm/

ioremap.c
    110  int ioremap_page(unsigned long virt, unsigned long phys,  in ioremap_page() argument
    113  return ioremap_page_range(virt, virt + PAGE_SIZE, phys,  in ioremap_page()
    143  static void unmap_area_sections(unsigned long virt, unsigned long size)  in unmap_area_sections() argument
    145  unsigned long addr = virt, end = virt + (size & ~(SZ_1M - 1));  in unmap_area_sections()
    180  flush_tlb_kernel_range(virt, end);  in unmap_area_sections()
    184  remap_area_sections(unsigned long virt, unsigned long pfn,  in remap_area_sections() argument
    187  unsigned long addr = virt, end = virt + size;  in remap_area_sections()
    194  unmap_area_sections(virt, size);  in remap_area_sections()
    211  remap_area_supersections(unsigned long virt, unsigned long pfn,  in remap_area_supersections() argument
    214  unsigned long addr = virt, end = virt + size;  in remap_area_supersections()
    [all …]
/linux/arch/powerpc/kernel/

exceptions-64s.S
    269   .if \virt
    298   .if !\virt
    363   GEN_BRANCH_TO_COMMON \name \virt
    1361  GEN_INT_ENTRY data_access, virt=0
    1364  GEN_INT_ENTRY data_access, virt=1
    1579  GEN_INT_ENTRY alignment, virt=0
    1582  GEN_INT_ENTRY alignment, virt=1
    1777  GEN_INT_ENTRY decrementer, virt=0
    1780  GEN_INT_ENTRY decrementer, virt=1
    1909  .macro SYSTEM_CALL virt
    [all …]