/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

  gk20a.c
    145  struct gk20a_instmem *imem = obj->base.imem;  in gk20a_instobj_iommu_recycle_vaddr() local
    162  while (imem->vaddr_use + size > imem->vaddr_max) {  in gk20a_instmem_vaddr_gc()
    177  struct gk20a_instmem *imem = node->imem;  in gk20a_instobj_acquire_dma() local
    189  struct gk20a_instmem *imem = node->base.imem;  in gk20a_instobj_acquire_iommu() local
    219  imem->vaddr_use, imem->vaddr_max);  in gk20a_instobj_acquire_iommu()
    232  struct gk20a_instmem *imem = node->imem;  in gk20a_instobj_release_dma() local
    244  struct gk20a_instmem *imem = node->base.imem;  in gk20a_instobj_release_iommu() local
    298  struct gk20a_instmem *imem = node->base.imem;  in gk20a_instobj_dtor_dma() local
    315  struct gk20a_instmem *imem = node->base.imem;  in gk20a_instobj_dtor_iommu() local
    539  node->imem = imem;  in gk20a_instobj_new()
    [all …]

  base.c
    76   spin_lock(&imem->lock);  in nvkm_instobj_dtor()
    101  ret = imem->func->memory_new(imem, size, align, zero, &memory);  in nvkm_instobj_new()
    135  return imem->func->rd32(imem, addr);  in nvkm_instmem_rd32()
    141  return imem->func->wr32(imem, addr, data);  in nvkm_instmem_wr32()
    181  if (imem->func->fini)  in nvkm_instmem_fini()
    182  imem->func->fini(imem);  in nvkm_instmem_fini()
    213  return imem->func->oneinit(imem);  in nvkm_instmem_oneinit()
    221  void *data = imem;  in nvkm_instmem_dtor()
    222  if (imem->func->dtor)  in nvkm_instmem_dtor()
    223  data = imem->func->dtor(imem);  in nvkm_instmem_dtor()
    [all …]

  nv40.c
    43   struct nv40_instmem *imem;  member
    133  iobj->imem = imem;  in nv40_instobj_new()
    177  imem->base.reserved = round_up(imem->base.reserved, 4096);  in nv40_instmem_oneinit()
    179  ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1);  in nv40_instmem_oneinit()
    221  nvkm_mm_fini(&imem->heap);  in nv40_instmem_dtor()
    222  if (imem->iomem)  in nv40_instmem_dtor()
    223  iounmap(imem->iomem);  in nv40_instmem_dtor()
    224  return imem;  in nv40_instmem_dtor()
    244  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))  in nv40_instmem_new()
    247  *pimem = &imem->base;  in nv40_instmem_new()
    [all …]

  nv04.c
    41   struct nv04_instmem *imem;  member
    133  iobj->imem = imem;  in nv04_instobj_new()
    167  ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1);  in nv04_instmem_oneinit()
    173  &imem->base.vbios);  in nv04_instmem_oneinit()
    184  &imem->base.ramfc);  in nv04_instmem_oneinit()
    190  &imem->base.ramro);  in nv04_instmem_oneinit()
    205  nvkm_mm_fini(&imem->heap);  in nv04_instmem_dtor()
    206  return imem;  in nv04_instmem_dtor()
    223  struct nv04_instmem *imem;  in nv04_instmem_new() local
    225  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))  in nv04_instmem_new()
    [all …]

  nv50.c
    59   struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_wr32_slow() local
    68   imem->addr = base;  in nv50_instobj_wr32_slow()
    78   struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_rd32_slow() local
    88   imem->addr = base;  in nv50_instobj_rd32_slow()
    122  struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_kmap() local
    194  struct nv50_instmem *imem = iobj->imem;  in nv50_instobj_release() local
    219  struct nvkm_instmem *imem = &iobj->imem->base;  in nv50_instobj_acquire() local
    263  struct nvkm_instmem *imem = &iobj->imem->base;  in nv50_instobj_boot() local
    314  struct nvkm_instmem *imem = &iobj->imem->base;  in nv50_instobj_dtor() local
    364  iobj->imem = imem;  in nv50_instobj_new()
    [all …]
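
The base.c matches above show the dispatch pattern that the per-chipset files (nv04.c, nv40.c, nv50.c, gk20a.c) plug into: the generic nvkm_instmem accessors simply forward through the imem->func table that each implementation fills in. A minimal sketch of that pattern, reconstructed from the matches listed here rather than copied verbatim from the tree:

    /* Generic accessors forward to the chipset implementation through the
     * per-instmem function table (see the base.c hits at 135/141 above). */
    u32
    nvkm_instmem_rd32(struct nvkm_instmem *imem, u32 addr)
    {
        return imem->func->rd32(imem, addr);
    }

    void
    nvkm_instmem_wr32(struct nvkm_instmem *imem, u32 addr, u32 data)
    {
        imem->func->wr32(imem, addr, data);
    }

Object allocation follows the same shape: the base.c hit at 101 calls imem->func->memory_new(imem, size, align, zero, &memory), so gk20a's DMA/IOMMU objects and the nv04/nv40/nv50 heap objects sit behind one interface.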

/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

  dmanv40.c
    74   struct nvkm_instmem *imem = device->imem;  in nv40_fifo_dma_engine_fini() local
    88   nvkm_kmap(imem->ramfc);  in nv40_fifo_dma_engine_fini()
    90   nvkm_done(imem->ramfc);  in nv40_fifo_dma_engine_fini()
    104  struct nvkm_instmem *imem = device->imem;  in nv40_fifo_dma_engine_init() local
    119  nvkm_kmap(imem->ramfc);  in nv40_fifo_dma_engine_init()
    120  nvkm_wo32(imem->ramfc, chan->ramfc + ctx, inst);  in nv40_fifo_dma_engine_init()
    121  nvkm_done(imem->ramfc);  in nv40_fifo_dma_engine_init()
    155  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv40_fifo_dma_object_ctor() local
    201  struct nvkm_instmem *imem = device->imem;  in nv40_fifo_dma_new() local
    232  nvkm_kmap(imem->ramfc);  in nv40_fifo_dma_new()
    [all …]

  dmanv04.c
    39   struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_object_dtor() local
    42   nvkm_ramht_remove(imem->ramht, cookie);  in nv04_fifo_dma_object_dtor()
    51   struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_object_ctor() local
    79   struct nvkm_memory *fctx = device->imem->ramfc;  in nv04_fifo_dma_fini()
    144  struct nvkm_instmem *imem = fifo->base.engine.subdev.device->imem;  in nv04_fifo_dma_dtor() local
    147  nvkm_kmap(imem->ramfc);  in nv04_fifo_dma_dtor()
    151  nvkm_done(imem->ramfc);  in nv04_fifo_dma_dtor()
    175  struct nvkm_instmem *imem = device->imem;  in nv04_fifo_dma_new() local
    205  nvkm_kmap(imem->ramfc);  in nv04_fifo_dma_new()
    209  nvkm_wo32(imem->ramfc, chan->ramfc + 0x10,  in nv04_fifo_dma_new()
    [all …]

  dmanv10.c
    46   struct nvkm_instmem *imem = device->imem;  in nv10_fifo_dma_new() local
    76   nvkm_kmap(imem->ramfc);  in nv10_fifo_dma_new()
    77   nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);  in nv10_fifo_dma_new()
    78   nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);  in nv10_fifo_dma_new()
    79   nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);  in nv10_fifo_dma_new()
    80   nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,  in nv10_fifo_dma_new()
    87   nvkm_done(imem->ramfc);  in nv10_fifo_dma_new()

  dmanv17.c
    46   struct nvkm_instmem *imem = device->imem;  in nv17_fifo_dma_new() local
    77   nvkm_kmap(imem->ramfc);  in nv17_fifo_dma_new()
    78   nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);  in nv17_fifo_dma_new()
    79   nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);  in nv17_fifo_dma_new()
    80   nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);  in nv17_fifo_dma_new()
    81   nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,  in nv17_fifo_dma_new()
    88   nvkm_done(imem->ramfc);  in nv17_fifo_dma_new()

  nv17.c
    55   struct nvkm_instmem *imem = device->imem;  in nv17_fifo_init() local
    56   struct nvkm_ramht *ramht = imem->ramht;  in nv17_fifo_init()
    57   struct nvkm_memory *ramro = imem->ramro;  in nv17_fifo_init()
    58   struct nvkm_memory *ramfc = imem->ramfc;  in nv17_fifo_init()

  nv40.c
    65   struct nvkm_instmem *imem = device->imem;  in nv40_fifo_init() local
    66   struct nvkm_ramht *ramht = imem->ramht;  in nv40_fifo_init()
    67   struct nvkm_memory *ramro = imem->ramro;  in nv40_fifo_init()
    68   struct nvkm_memory *ramfc = imem->ramfc;  in nv40_fifo_init()

  nv04.c
    335  struct nvkm_instmem *imem = device->imem;  in nv04_fifo_init() local
    336  struct nvkm_ramht *ramht = imem->ramht;  in nv04_fifo_init()
    337  struct nvkm_memory *ramro = imem->ramro;  in nv04_fifo_init()
    338  struct nvkm_memory *ramfc = imem->ramfc;  in nv04_fifo_init()
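
Every dmanv*.c hit above follows the same RAMFC access bracket: nvkm_kmap() to make the context RAM CPU-accessible, nvkm_wo32() to fill in the per-channel words, nvkm_done() to release it again. A minimal sketch of that bracket, based on the dmanv10.c lines listed above; the helper name and parameters are illustrative, not from the tree:

    /* Sketch of the RAMFC write bracket used by the dmanv* files above.
     * Offsets follow dmanv10.c: 0x00/0x04 take the initial submit offset,
     * 0x0c takes the pushbuffer instance address shifted right by 4. */
    static void
    ramfc_init_sketch(struct nvkm_instmem *imem, u32 ramfc, u32 offset, u64 push_addr)
    {
        nvkm_kmap(imem->ramfc);
        nvkm_wo32(imem->ramfc, ramfc + 0x00, offset);
        nvkm_wo32(imem->ramfc, ramfc + 0x04, offset);
        nvkm_wo32(imem->ramfc, ramfc + 0x0c, (u32)(push_addr >> 4));
        nvkm_done(imem->ramfc);
    }

The nv04.c/nv17.c/nv40.c fifo_init hits are the consumer side of the same objects: imem->ramht, imem->ramro and imem->ramfc, which also show up in the nv04 instmem oneinit matches earlier in this listing.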

/linux/drivers/gpu/drm/nouveau/nvkm/engine/device/

  base.c
    86   .imem = { 0x00000001, nv04_instmem_new },
    107  .imem = { 0x00000001, nv04_instmem_new },
    129  .imem = { 0x00000001, nv04_instmem_new },
    149  .imem = { 0x00000001, nv04_instmem_new },
    171  .imem = { 0x00000001, nv04_instmem_new },
    193  .imem = { 0x00000001, nv04_instmem_new },
    215  .imem = { 0x00000001, nv04_instmem_new },
    237  .imem = { 0x00000001, nv04_instmem_new },
    259  .imem = { 0x00000001, nv04_instmem_new },
    281  .imem = { 0x00000001, nv04_instmem_new },
    [all …]

  user.c
    78   struct nvkm_instmem *imem = device->imem;  in nvkm_udevice_info() local
    161  if (imem && args->v0.ram_size > 0)  in nvkm_udevice_info()
    162  args->v0.ram_user = args->v0.ram_user - imem->reserved;  in nvkm_udevice_info()

/linux/drivers/net/wwan/iosm/

  iosm_ipc_mux.c
    185  ipc_imem_channel_close(ipc_mux->imem, ipc_mux->channel_id);  in ipc_mux_channel_close()
    221  ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);  in ipc_mux_schedule()
    225  ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);  in ipc_mux_schedule()
    234  ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);  in ipc_mux_schedule()
    238  ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);  in ipc_mux_schedule()
    272  struct iosm_imem *imem)  in ipc_mux_init() argument
    288  ipc_mux->pcie = imem->pcie;  in ipc_mux_init()
    289  ipc_mux->imem = imem;  in ipc_mux_init()
    290  ipc_mux->ipc_protocol = imem->ipc_protocol;  in ipc_mux_init()
    291  ipc_mux->dev = imem->dev;  in ipc_mux_init()
    [all …]

  iosm_ipc_protocol.c
    17   int index = ipc_protocol_msg_prep(ipc_protocol->imem, msg_type,  in ipc_protocol_tq_msg_send()
    25   ipc_protocol_msg_hp_update(ipc_protocol->imem);  in ipc_protocol_tq_msg_send()
    83   index = ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_msg_send()
    100  ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_msg_send()
    182  ipc_task_queue_send_task(ipc_protocol->imem,  in ipc_protocol_suspend()
    235  ipc_protocol->imem = ipc_imem;  in ipc_protocol_init()

  iosm_ipc_pcie.c
    43   ipc_imem_cleanup(ipc_pcie->imem);  in ipc_pcie_cleanup()
    53   kfree(ipc_pcie->imem);  in ipc_pcie_deinit()
    302  ipc_pcie->imem = ipc_imem_init(ipc_pcie, pci->device,  in ipc_pcie_probe()
    304  if (!ipc_pcie->imem) {  in ipc_pcie_probe()
    341  ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, true);  in ipc_pcie_suspend_s2idle()
    352  ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, false);  in ipc_pcie_resume_s2idle()
    378  ipc_imem_pm_suspend(ipc_pcie->imem);  in ipc_pcie_suspend()
    423  ipc_imem_pm_resume(ipc_pcie->imem);  in ipc_pcie_resume()

  iosm_ipc_task_queue.c
    140  int ipc_task_queue_send_task(struct iosm_imem *imem,  in ipc_task_queue_send_task() argument
    157  ret = ipc_task_queue_add_task(imem, arg, copy, func,  in ipc_task_queue_send_task()
    160  dev_err(imem->ipc_task->dev,  in ipc_task_queue_send_task()

  iosm_ipc_mux_codec.c
    20   ipc_imem_ul_send(ipc_mux->imem);  in ipc_mux_tq_cmd_send()
    28   int ret = ipc_task_queue_send_task(ipc_mux->imem, ipc_mux_tq_cmd_send,  in ipc_mux_acb_send()
    49   ipc_uevent_send(ipc_mux->imem->dev, UEVENT_MDM_TIMEOUT);  in ipc_mux_acb_send()
    601  (void)ipc_imem_ul_write_td(ipc_mux->imem);  in ipc_mux_lite_send_qlt()
    759  (void)ipc_imem_ul_write_td(ipc_mux->imem);  in ipc_mux_ul_adgh_encode()
    851  ipc_imem_td_update_timer_start(ipc_mux->imem);  in ipc_mux_tq_ul_trigger_encode()
    898  ret = ipc_task_queue_send_task(ipc_mux->imem,  in ipc_mux_ul_trigger_encode()
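
In the IOSM WWAN driver, imem is the central struct iosm_imem context: the mux, protocol and PCIe sub-units above each keep a back-pointer to it and call back through it for channel close, TD timer control, task-queue scheduling and power management. A minimal sketch of the wiring, taken from the ipc_mux_init() lines above; the helper name is hypothetical and the mux context type is assumed to be struct iosm_mux:

    /* Sketch: bind a sub-unit to the owning iosm_imem, as ipc_mux_init()
     * does in the hits above.  Helper name is illustrative only. */
    static void
    ipc_mux_bind_imem_sketch(struct iosm_mux *ipc_mux, struct iosm_imem *imem)
    {
        ipc_mux->pcie = imem->pcie;                 /* shared PCIe handle    */
        ipc_mux->imem = imem;                       /* back-pointer          */
        ipc_mux->ipc_protocol = imem->ipc_protocol; /* shared protocol layer */
        ipc_mux->dev = imem->dev;                   /* shared struct device  */
    }

That stored back-pointer is what the mux codec and scheduler hits above use to reach ipc_imem_ul_send(), ipc_imem_td_update_timer_suspend() and ipc_task_queue_send_task().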

/linux/drivers/remoteproc/

  qcom_pil_info.c
    33   struct resource imem;  in qcom_pil_info_init() local
    45   ret = of_address_to_resource(np, 0, &imem);  in qcom_pil_info_init()
    50   base = ioremap(imem.start, resource_size(&imem));  in qcom_pil_info_init()
    56   memset_io(base, 0, resource_size(&imem));  in qcom_pil_info_init()
    59   _reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;  in qcom_pil_info_init()
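
Here imem is not a driver object but the Qualcomm PIL relocation region in on-chip IMEM: qcom_pil_info_init() resolves it from the device tree, maps it, clears it and derives the number of relocation entries from its size. A trimmed sketch stitched together from the lines above; the error codes and the surrounding node lookup are simplified assumptions:

    struct resource imem;
    void __iomem *base;
    int ret;

    /* resolve the IMEM region described by the DT node */
    ret = of_address_to_resource(np, 0, &imem);
    if (ret < 0)
        return ret;

    /* map it, wipe stale PIL entries, size the relocation table */
    base = ioremap(imem.start, resource_size(&imem));
    if (!base)
        return -ENOMEM;
    memset_io(base, 0, resource_size(&imem));
    _reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;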

/linux/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/

  nv40.c
    33   struct nvkm_instmem *imem = device->imem;  in nv40_mpeg_mthd_dma() local
    37   u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);  in nv40_mpeg_mthd_dma()
    38   u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);  in nv40_mpeg_mthd_dma()
    39   u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);  in nv40_mpeg_mthd_dma()
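
The MPEG engine is a consumer of the nvkm_instmem_rd32() accessor shown earlier: nv40_mpeg_mthd_dma() reads the three words of a DMA object straight out of instance memory. A minimal sketch based on the lines above; the wrapper name is hypothetical:

    /* Sketch: fetch the three words of a DMA object from instance memory,
     * as nv40_mpeg_mthd_dma() does in the hits above. */
    static void
    dma_object_read_sketch(struct nvkm_device *device, u32 inst,
                           u32 *dma0, u32 *dma1, u32 *dma2)
    {
        struct nvkm_instmem *imem = device->imem;

        *dma0 = nvkm_instmem_rd32(imem, inst + 0);
        *dma1 = nvkm_instmem_rd32(imem, inst + 4);
        *dma2 = nvkm_instmem_rd32(imem, inst + 8);
    }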

/linux/drivers/gpu/drm/nouveau/nvkm/core/

  memory.c
    141  struct nvkm_instmem *imem = device->imem;  in nvkm_memory_new() local
    145  if (unlikely(target != NVKM_MEM_TARGET_INST || !imem))  in nvkm_memory_new()
    148  ret = nvkm_instobj_new(imem, size, align, zero, &memory);  in nvkm_memory_new()
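
nvkm_memory_new() is where generic memory allocations get routed to the instmem subdev: anything asking for the NVKM_MEM_TARGET_INST target is handed to nvkm_instobj_new(). A minimal sketch of that check, reconstructed from the lines above; the error code and the handling of the returned object are assumptions:

    struct nvkm_instmem *imem = device->imem;
    struct nvkm_memory *memory;
    int ret;

    /* only instance-memory targets are serviced here, and only when the
     * device actually has an instmem subdev */
    if (unlikely(target != NVKM_MEM_TARGET_INST || !imem))
        return -ENOSYS;

    ret = nvkm_instobj_new(imem, size, align, zero, &memory);
    if (ret)
        return ret;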

/linux/drivers/memory/

  brcmstb_dpfe.c
    185  void __iomem *imem;  member
    570  u32 __iomem *imem = priv->imem;  in __verify_fw_checksum() local
    589  sum += readl_relaxed(imem + i);  in __verify_fw_checksum()
    625  const u32 *dmem, *imem;  in brcmstb_dpfe_download_firmware() local
    675  imem = fw_blob;  in brcmstb_dpfe_download_firmware()
    682  ret = __write_firmware(priv->imem, imem, imem_size, is_big_endian);  in brcmstb_dpfe_download_firmware()
    887  priv->imem = devm_ioremap_resource(dev, res);  in brcmstb_dpfe_probe()
    888  if (IS_ERR(priv->imem)) {  in brcmstb_dpfe_probe()
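
In the Broadcom DPFE driver, imem is the memory-mapped instruction memory of the DPFE co-processor: probe maps it with devm_ioremap_resource(), the download path writes the IMEM section of the firmware blob into it via __write_firmware(), and the checksum pass reads it back word by word with readl_relaxed(). A minimal sketch of that read-back loop, based on the __verify_fw_checksum() hits above; the word count and how the sum is checked are simplified assumptions:

    /* Sketch of the IMEM read-back seen in __verify_fw_checksum() above. */
    static u32
    dpfe_imem_sum_sketch(void __iomem *imem_base, unsigned int words)
    {
        u32 __iomem *imem = imem_base;
        unsigned int i;
        u32 sum = 0;

        for (i = 0; i < words; i++)
            sum += readl_relaxed(imem + i);

        return sum;
    }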

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/acr/

  hsfw.c
    33   kfree(hsfw->imem);  in nvkm_acr_hsfw_del()
    145  hsfw->imem = kmalloc(desc->code_size, GFP_KERNEL);  in nvkm_acr_hsfw_load_bl()
    146  memcpy(hsfw->imem, data + desc->code_off, desc->code_size);  in nvkm_acr_hsfw_load_bl()
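
In the ACR subdev, hsfw->imem is a CPU-side staging buffer for firmware code: nvkm_acr_hsfw_load_bl() allocates it and copies the code section of the loaded image into it, and nvkm_acr_hsfw_del() frees it. A minimal sketch of the load side, based on the lines above; the allocation-failure handling is an assumption:

    /* Stage the code section in a kernel buffer, as nvkm_acr_hsfw_load_bl()
     * does in the hits above. */
    hsfw->imem = kmalloc(desc->code_size, GFP_KERNEL);
    if (!hsfw->imem)
        return -ENOMEM;
    memcpy(hsfw->imem, data + desc->code_off, desc->code_size);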

/linux/Documentation/devicetree/bindings/memory-controllers/

  brcm,dpfe-cpu.yaml
    31   - const: dpfe-imem
    47   reg-names = "dpfe-cpu", "dpfe-dmem", "dpfe-imem";