Lines matching refs: gpu (identifier cross-reference over drivers/gpu/drm/etnaviv/etnaviv_buffer.c; each hit shows the source line number, the matching code, and the enclosing function, with "argument" marking lines where gpu appears as a function parameter)

89 static void etnaviv_cmd_select_pipe(struct etnaviv_gpu *gpu,  in etnaviv_cmd_select_pipe()  argument
94 lockdep_assert_held(&gpu->lock); in etnaviv_cmd_select_pipe()
102 if (gpu->exec_state == ETNA_PIPE_2D) in etnaviv_cmd_select_pipe()
104 else if (gpu->exec_state == ETNA_PIPE_3D) in etnaviv_cmd_select_pipe()
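The exec_state checks at lines 102/104 pick which caches to flush before the pipe switch: the flush must target the pipe that is currently active, not the one being selected. A sketch of the elided context, following the mainline driver (exact flush bits can vary by kernel version):

    u32 flush = 0;

    /* flush the caches of the pipe we are leaving */
    if (gpu->exec_state == ETNA_PIPE_2D)
        flush = VIVS_GL_FLUSH_CACHE_PE2D;
    else if (gpu->exec_state == ETNA_PIPE_3D)
        flush = VIVS_GL_FLUSH_CACHE_DEPTH | VIVS_GL_FLUSH_CACHE_COLOR;

    CMD_LOAD_STATE(buffer, VIVS_GL_FLUSH_CACHE, flush);
    /* stall the FE until the PE has processed the flush */
    CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
    CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);

    CMD_LOAD_STATE(buffer, VIVS_GL_PIPE_SELECT,
                   VIVS_GL_PIPE_SELECT_PIPE(pipe));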
115 static void etnaviv_buffer_dump(struct etnaviv_gpu *gpu, in etnaviv_buffer_dump() argument
121 dev_info(gpu->dev, "virt %p phys 0x%08x free 0x%08x\n", in etnaviv_buffer_dump()
123 &gpu->mmu_context->cmdbuf_mapping) + in etnaviv_buffer_dump()
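The dev_info at line 121 wraps across several source lines; note that "phys" in the format string is really the GPU virtual address of the dump window inside the cmdbuf mapping, not a CPU physical address. A sketch of the whole helper, per the mainline driver:

    static void etnaviv_buffer_dump(struct etnaviv_gpu *gpu,
        struct etnaviv_cmdbuf *buf, u32 off, u32 len)
    {
        u32 size = buf->size;
        u32 *ptr = buf->vaddr + off;

        dev_info(gpu->dev, "virt %p phys 0x%08x free 0x%08x\n",
                 ptr, etnaviv_cmdbuf_get_va(buf,
                          &gpu->mmu_context->cmdbuf_mapping) + off,
                 size - len * 4 - off);

        print_hex_dump(KERN_INFO, "cmd ", DUMP_PREFIX_OFFSET, 16, 4,
                       ptr, len * 4, 0);
    }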
151 static u32 etnaviv_buffer_reserve(struct etnaviv_gpu *gpu, in etnaviv_buffer_reserve() argument
158 &gpu->mmu_context->cmdbuf_mapping) + in etnaviv_buffer_reserve()
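etnaviv_buffer_reserve (line 151) is the ring's wrap-around helper: if the requested command dwords would run past the end of the buffer, the write offset resets to zero, and the helper returns the GPU address where the next commands will land. A sketch, following the mainline driver:

    static u32 etnaviv_buffer_reserve(struct etnaviv_gpu *gpu,
        struct etnaviv_cmdbuf *buffer, unsigned int cmd_dwords)
    {
        /* wrap to the start if the request would overflow the ring */
        if (buffer->user_size + cmd_dwords * sizeof(u64) > buffer->size)
            buffer->user_size = 0;

        /* GPU virtual address of the current write position */
        return etnaviv_cmdbuf_get_va(buffer,
                   &gpu->mmu_context->cmdbuf_mapping) + buffer->user_size;
    }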
162 u16 etnaviv_buffer_init(struct etnaviv_gpu *gpu) in etnaviv_buffer_init() argument
164 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_buffer_init()
166 lockdep_assert_held(&gpu->lock); in etnaviv_buffer_init()
173 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping) in etnaviv_buffer_init()
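etnaviv_buffer_init primes the kernel ring with a self-referencing WAIT/LINK pair: the FE spins on the WAIT until a later submit patches it into a LINK to new commands. The return value is the prefetch size in 64-bit words. A sketch, assuming the single-argument CMD_WAIT form (newer kernels also pass a wait-cycle count):

    u16 etnaviv_buffer_init(struct etnaviv_gpu *gpu)
    {
        struct etnaviv_cmdbuf *buffer = &gpu->buffer;

        lockdep_assert_held(&gpu->lock);

        buffer->user_size = 0;

        /* WAIT, then LINK back to the WAIT itself */
        CMD_WAIT(buffer);
        CMD_LINK(buffer, 2,
                 etnaviv_cmdbuf_get_va(buffer,
                     &gpu->mmu_context->cmdbuf_mapping)
                 + buffer->user_size - 4);

        return buffer->user_size / 8;
    }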
179 u16 etnaviv_buffer_config_mmuv2(struct etnaviv_gpu *gpu, u32 mtlb_addr, u32 safe_addr) in etnaviv_buffer_config_mmuv2() argument
181 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_buffer_config_mmuv2()
183 lockdep_assert_held(&gpu->lock); in etnaviv_buffer_config_mmuv2()
187 if (gpu->identity.features & chipFeatures_PIPE_3D) { in etnaviv_buffer_config_mmuv2()
197 if (gpu->identity.features & chipFeatures_PIPE_2D) { in etnaviv_buffer_config_mmuv2()
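The two feature checks emit the same MMUv2 setup once per available pipe. A sketch of the 3D branch, following the mainline driver (the 2D branch at line 197 is identical apart from the pipe select):

    if (gpu->identity.features & chipFeatures_PIPE_3D) {
        CMD_LOAD_STATE(buffer, VIVS_GL_PIPE_SELECT,
                       VIVS_GL_PIPE_SELECT_PIPE(ETNA_PIPE_3D));
        /* point the MMU at the new master TLB and safe address */
        CMD_LOAD_STATE(buffer, VIVS_MMUv2_CONFIGURATION,
                       mtlb_addr | VIVS_MMUv2_CONFIGURATION_MODE_MODE4_K);
        CMD_LOAD_STATE(buffer, VIVS_MMUv2_SAFE_ADDRESS, safe_addr);
        /* stall until the PE has picked up the new configuration */
        CMD_SEM(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
        CMD_STALL(buffer, SYNC_RECIPIENT_FE, SYNC_RECIPIENT_PE);
    }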
214 u16 etnaviv_buffer_config_pta(struct etnaviv_gpu *gpu, unsigned short id) in etnaviv_buffer_config_pta() argument
216 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_buffer_config_pta()
218 lockdep_assert_held(&gpu->lock); in etnaviv_buffer_config_pta()
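etnaviv_buffer_config_pta is the smallest of the config buffers: it loads the page-table-array index for this context and ends the stream. A sketch, per the mainline driver:

    u16 etnaviv_buffer_config_pta(struct etnaviv_gpu *gpu, unsigned short id)
    {
        struct etnaviv_cmdbuf *buffer = &gpu->buffer;

        lockdep_assert_held(&gpu->lock);

        buffer->user_size = 0;

        CMD_LOAD_STATE(buffer, VIVS_MMUv2_PTA_CONFIG,
                       VIVS_MMUv2_PTA_CONFIG_INDEX(id));
        CMD_END(buffer);

        buffer->user_size = ALIGN(buffer->user_size, 8);

        return buffer->user_size / 8;
    }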
232 void etnaviv_buffer_end(struct etnaviv_gpu *gpu) in etnaviv_buffer_end() argument
234 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_buffer_end()
237 bool has_blt = !!(gpu->identity.minor_features5 & in etnaviv_buffer_end()
240 lockdep_assert_held(&gpu->lock); in etnaviv_buffer_end()
242 if (gpu->exec_state == ETNA_PIPE_2D) in etnaviv_buffer_end()
244 else if (gpu->exec_state == ETNA_PIPE_3D) in etnaviv_buffer_end()
257 link_target = etnaviv_buffer_reserve(gpu, buffer, dwords); in etnaviv_buffer_end()
268 if (gpu->exec_state == ETNA_PIPE_3D) { in etnaviv_buffer_end()
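As in etnaviv_cmd_select_pipe, the exec_state checks at lines 242/244 decide the final cache flush, but the 3D case flushes far more state. A sketch of the selection, following the mainline driver:

    u32 flush = 0;

    if (gpu->exec_state == ETNA_PIPE_2D)
        flush = VIVS_GL_FLUSH_CACHE_PE2D;
    else if (gpu->exec_state == ETNA_PIPE_3D)
        flush = VIVS_GL_FLUSH_CACHE_DEPTH |
                VIVS_GL_FLUSH_CACHE_COLOR |
                VIVS_GL_FLUSH_CACHE_TEXTURE |
                VIVS_GL_FLUSH_CACHE_TEXTUREVS |
                VIVS_GL_FLUSH_CACHE_SHADER_L2;

If flush is non-zero, the reserve at line 257 makes room for the flush sequence (plus extra dwords for a BLT fence when has_blt), the sequence ends with CMD_END, and the live WAIT is patched to link to it; otherwise the WAIT/LINK pair is simply replaced with an END.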
300 void etnaviv_sync_point_queue(struct etnaviv_gpu *gpu, unsigned int event) in etnaviv_sync_point_queue() argument
302 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_sync_point_queue()
306 lockdep_assert_held(&gpu->lock); in etnaviv_sync_point_queue()
313 target = etnaviv_buffer_reserve(gpu, buffer, dwords); in etnaviv_sync_point_queue()
325 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping) in etnaviv_sync_point_queue()
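etnaviv_sync_point_queue pauses the GPU at a sync point: it signals an event from the PE, stops the FE with an END, and appends a fresh WAIT/LINK pair for the eventual resume; the previous WAIT is then patched to link here. A sketch of the body, per the mainline driver:

    u32 dwords = 4;  /* 1 event + 1 end + 1 wait + 1 link */
    u32 target = etnaviv_buffer_reserve(gpu, buffer, dwords);

    /* signal the sync point event */
    CMD_LOAD_STATE(buffer, VIVS_GL_EVENT,
                   VIVS_GL_EVENT_EVENT_ID(event) | VIVS_GL_EVENT_FROM_PE);

    /* stop the FE to 'pause' the GPU */
    CMD_END(buffer);

    /* new wait/link pair for the resume path */
    CMD_WAIT(buffer);
    CMD_LINK(buffer, 2,
             etnaviv_cmdbuf_get_va(buffer,
                 &gpu->mmu_context->cmdbuf_mapping)
             + buffer->user_size - 4);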
339 void etnaviv_buffer_queue(struct etnaviv_gpu *gpu, u32 exec_state, in etnaviv_buffer_queue() argument
343 struct etnaviv_cmdbuf *buffer = &gpu->buffer; in etnaviv_buffer_queue()
347 bool switch_context = gpu->exec_state != exec_state; in etnaviv_buffer_queue()
348 bool switch_mmu_context = gpu->mmu_context != mmu_context; in etnaviv_buffer_queue()
349 unsigned int new_flush_seq = READ_ONCE(gpu->mmu_context->flush_seq); in etnaviv_buffer_queue()
350 bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq; in etnaviv_buffer_queue()
351 bool has_blt = !!(gpu->identity.minor_features5 & in etnaviv_buffer_queue()
354 lockdep_assert_held(&gpu->lock); in etnaviv_buffer_queue()
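Lines 347-350 compute the three submit-time decisions: pipe switch, MMU context switch, and TLB flush. The flush check is a lock-free sequence counter: the MMU side (etnaviv_mmu.c) increments context->flush_seq whenever mappings change, and the submit path compares it with the value it last flushed at, reading it once via READ_ONCE so the comparison and the later store at line 437 use the same snapshot. A distilled sketch of the pattern, not verbatim driver code:

    /* producer, on any map/unmap of the context */
    context->flush_seq++;

    /* consumer, at submit time */
    unsigned int new_flush_seq = READ_ONCE(gpu->mmu_context->flush_seq);
    bool need_flush = switch_mmu_context ||
                      gpu->flush_seq != new_flush_seq;

    if (need_flush) {
        /* ... emit TLB flush commands, then record the flushed state */
        gpu->flush_seq = new_flush_seq;
    }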
357 etnaviv_buffer_dump(gpu, buffer, 0, 0x50); in etnaviv_buffer_queue()
360 &gpu->mmu_context->cmdbuf_mapping); in etnaviv_buffer_queue()
376 if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1) in etnaviv_buffer_queue()
387 if (switch_mmu_context && gpu->sec_mode == ETNA_SEC_KERNEL) in etnaviv_buffer_queue()
390 target = etnaviv_buffer_reserve(gpu, buffer, extra_dwords); in etnaviv_buffer_queue()
398 struct etnaviv_iommu_context *old_context = gpu->mmu_context; in etnaviv_buffer_queue()
400 gpu->mmu_context = etnaviv_iommu_context_get(mmu_context); in etnaviv_buffer_queue()
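The context switch at lines 398-400 is plain reference counting: take a reference on the incoming context before dropping the old one, so the GPU never points at a freed context. Per the mainline driver:

    if (switch_mmu_context) {
        struct etnaviv_iommu_context *old_context = gpu->mmu_context;

        gpu->mmu_context = etnaviv_iommu_context_get(mmu_context);
        etnaviv_iommu_context_put(old_context);
    }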
406 if (gpu->mmu_context->global->version == ETNAVIV_IOMMU_V1) { in etnaviv_buffer_queue()
418 gpu->sec_mode == ETNA_SEC_KERNEL) { in etnaviv_buffer_queue()
420 etnaviv_iommuv2_get_pta_id(gpu->mmu_context); in etnaviv_buffer_queue()
426 if (gpu->sec_mode == ETNA_SEC_NONE) in etnaviv_buffer_queue()
427 flush |= etnaviv_iommuv2_get_mtlb_addr(gpu->mmu_context); in etnaviv_buffer_queue()
437 gpu->flush_seq = new_flush_seq; in etnaviv_buffer_queue()
441 etnaviv_cmd_select_pipe(gpu, buffer, exec_state); in etnaviv_buffer_queue()
442 gpu->exec_state = exec_state; in etnaviv_buffer_queue()
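The pipe switch is only emitted when exec_state actually changes, with the cached state updated right after, per the mainline driver:

    if (switch_context) {
        etnaviv_cmd_select_pipe(gpu, buffer, exec_state);
        gpu->exec_state = exec_state;
    }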
447 &gpu->mmu_context->cmdbuf_mapping); in etnaviv_buffer_queue()
471 return_target = etnaviv_buffer_reserve(gpu, buffer, return_dwords); in etnaviv_buffer_queue()
478 if (gpu->exec_state == ETNA_PIPE_2D) { in etnaviv_buffer_queue()
508 etnaviv_cmdbuf_get_va(buffer, &gpu->mmu_context->cmdbuf_mapping) in etnaviv_buffer_queue()
514 etnaviv_cmdbuf_get_va(cmdbuf, &gpu->mmu_context->cmdbuf_mapping), in etnaviv_buffer_queue()
537 etnaviv_buffer_dump(gpu, buffer, 0, 0x50); in etnaviv_buffer_queue()
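Everything emitted above only becomes visible to the GPU at the very end, when the previous WAIT is rewritten into a LINK to the new commands. The helper (etnaviv_buffer_replace_wait in the same file) writes the link target first and the opcode second, with memory barriers in between, so the FE can never prefetch a LINK carrying a stale address. Per the mainline driver:

    static void etnaviv_buffer_replace_wait(struct etnaviv_cmdbuf *buffer,
        unsigned int waitlink_offset, u32 cmd, u32 link)
    {
        u32 *lw = buffer->vaddr + waitlink_offset;

        lw[1] = link;   /* address first... */
        mb();
        lw[0] = cmd;    /* ...then flip WAIT into LINK */
        mb();
    }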