
Searched refs: num_instances (results 1 – 25 of 28), sorted by relevance


/linux/drivers/iommu/arm/arm-smmu/
arm-smmu-nvidia.c
37 unsigned int num_instances; member
69 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_write_reg()
90 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_write_reg64()
112 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_tlb_sync()
137 for (i = 0; i < nvidia->num_instances; i++) { in nvidia_smmu_reset()
182 for (inst = 0; inst < nvidia->num_instances; inst++) { in nvidia_smmu_global_fault()
232 for (inst = 0; inst < nvidia->num_instances; inst++) { in nvidia_smmu_context_fault()
295 nvidia_smmu->num_instances++; in nvidia_smmu_impl_init()
306 nvidia_smmu->num_instances++; in nvidia_smmu_impl_init()
309 if (nvidia_smmu->num_instances == 1) in nvidia_smmu_impl_init()
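The Tegra SMMU implementation above keeps several identical SMMU instances in lockstep: every register write, TLB sync, reset and fault check is repeated for each instance counted during init. A minimal sketch of that mirroring pattern, assuming simplified structure and helper names rather than the upstream definitions:

#include <linux/io.h>
#include <linux/types.h>

#define SKETCH_MAX_SMMU_INSTANCES	3	/* assumed upper bound */

struct nvidia_smmu_sketch {
	unsigned int num_instances;			/* counted at probe time */
	void __iomem *bases[SKETCH_MAX_SMMU_INSTANCES];	/* per-instance MMIO bases */
};

/* Mirror a 32-bit register write to every discovered SMMU instance. */
static void nvidia_smmu_sketch_write_reg(struct nvidia_smmu_sketch *nvidia,
					 unsigned long offset, u32 val)
{
	unsigned int i;

	for (i = 0; i < nvidia->num_instances; i++)
		writel_relaxed(val, nvidia->bases[i] + offset);
}
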
/linux/drivers/gpu/drm/amd/amdgpu/
sdma_v4_0.c
555 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_setup_ulv()
587 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_destroy_inst_ctx()
669 for (i = 1; i < adev->sdma.num_instances; i++) { in sdma_v4_0_init_microcode()
995 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_gfx_stop()
1038 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_page_stop()
1095 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_ctx_switch_enable()
1140 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_enable()
1443 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_0_load_microcode()
2097 if (j == adev->sdma.num_instances) in sdma_v4_0_wait_for_idle()
2604 switch (adev->sdma.num_instances) { in sdma_v4_0_set_irq_funcs()
[all …]
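The sdma_v4_0.c hits above, and most of the amdgpu files that follow, repeat one idiom: adev->sdma.num_instances bounds a loop over the SDMA engines so that stop/start/resume and microcode handling touch each instance exactly once. A hedged, self-contained sketch of that per-instance walk (the instance structure here is a simplified stand-in, not the driver's real types):

#include <linux/types.h>

#define SKETCH_MAX_SDMA_INSTANCES	8	/* assumption; matches the largest count set below */

struct sdma_instance_sketch {
	bool ring_started;
};

struct sdma_sketch {
	unsigned int num_instances;
	struct sdma_instance_sketch instance[SKETCH_MAX_SDMA_INSTANCES];
};

/* Stop every SDMA ring, in the spirit of the sdma_v*_gfx_stop() loops above. */
static void sdma_sketch_gfx_stop(struct sdma_sketch *sdma)
{
	unsigned int i;

	for (i = 0; i < sdma->num_instances; i++)
		sdma->instance[i].ring_started = false;
}
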
sdma_v3_0.c
253 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_free_microcode()
307 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
335 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
528 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
587 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
629 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
657 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
758 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
1107 adev->sdma.num_instances = 1; in sdma_v3_0_early_init()
1110 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in sdma_v3_0_early_init()
[all …]
cik_sdma.c
76 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_free_microcode()
135 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
148 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_init_microcode()
321 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_gfx_stop()
378 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_ctx_switch_enable()
416 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_enable()
442 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_gfx_resume()
508 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_gfx_resume()
553 for (i = 0; i < adev->sdma.num_instances; i++) { in cik_sdma_load_microcode()
941 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in cik_sdma_early_init()
[all …]
si_dma.c
120 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_stop()
139 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_start()
475 adev->sdma.num_instances = 2; in si_dma_early_init()
503 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_sw_init()
525 for (i = 0; i < adev->sdma.num_instances; i++) in si_dma_sw_fini()
659 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_set_clockgating_state()
671 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_set_clockgating_state()
753 for (i = 0; i < adev->sdma.num_instances; i++) in si_dma_set_ring_funcs()
847 for (i = 0; i < adev->sdma.num_instances; i++) { in si_dma_set_vm_pte_funcs()
851 adev->vm_manager.vm_pte_num_scheds = adev->sdma.num_instances; in si_dma_set_vm_pte_funcs()
sdma_v2_4.c
116 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_free_microcode()
149 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
179 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_init_microcode()
354 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_gfx_stop()
394 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_enable()
420 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_gfx_resume()
486 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_gfx_resume()
834 adev->sdma.num_instances = SDMA_MAX_INSTANCE; in sdma_v2_4_early_init()
874 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v2_4_sw_init()
895 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v2_4_sw_fini()
[all …]
sdma_v5_2.c
172 for (i = 1; i < adev->sdma.num_instances; i++) in sdma_v5_2_init_microcode()
184 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_init_microcode()
487 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_stop()
546 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_ctx_switch_enable()
581 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_enable()
609 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_gfx_resume()
783 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_load_microcode()
816 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_soft_reset()
1269 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_2_sw_init()
1311 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v5_2_sw_fini()
[all …]
amdgpu_sdma.c
41 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_sdma_get_instance_from_ring()
54 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_get_index_from_ring()
117 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_sdma_ras_late_init()
sdma_v5_0.c
276 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
307 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_init_microcode()
598 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_stop()
657 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_ctx_switch_enable()
699 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_enable()
727 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_gfx_resume()
905 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_load_microcode()
1333 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_sw_init()
1362 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_sw_fini()
1416 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v5_0_is_idle()
[all …]
amdgpu_discovery.c
388 adev->sdma.num_instances++; in amdgpu_discovery_reg_base_init()
1001 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
1022 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
1043 adev->sdma.num_instances = 1; in amdgpu_discovery_set_ip_blocks()
1081 adev->sdma.num_instances = 2; in amdgpu_discovery_set_ip_blocks()
1103 adev->sdma.num_instances = 8; in amdgpu_discovery_set_ip_blocks()
1130 adev->sdma.num_instances = 5; in amdgpu_discovery_set_ip_blocks()
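amdgpu_discovery.c shows where the count itself comes from: it is either bumped once per SDMA block found in the on-chip IP-discovery table or hard-coded per ASIC (1, 2, 5 or 8 in the hits above). A hedged sketch of the counting side, with a made-up table layout standing in for the real discovery parser:

#include <linux/types.h>

enum sketch_hw_ip { SKETCH_HWIP_GFX, SKETCH_HWIP_SDMA, SKETCH_HWIP_VCN };

struct sketch_ip_entry {
	enum sketch_hw_ip type;
};

/* Count SDMA instances while walking a discovery table: each match bumps the total. */
static unsigned int sketch_count_sdma_instances(const struct sketch_ip_entry *table,
						size_t nents)
{
	unsigned int num_instances = 0;
	size_t i;

	for (i = 0; i < nents; i++)
		if (table[i].type == SKETCH_HWIP_SDMA)
			num_instances++;

	return num_instances;
}
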
amdgpu_sdma.h
72 int num_instances; member
sdma_v4_4.c
239 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v4_4_reset_ras_error_count()
amdgpu_kms.c
384 if (query_fw->index >= adev->sdma.num_instances) in amdgpu_firmware_info()
447 for (i = 0; i < adev->sdma.num_instances; i++) in amdgpu_hw_ip_info()
1587 for (i = 0; i < adev->sdma.num_instances; i++) { in amdgpu_debugfs_firmware_info_show()
gfx_v9_4_2.c
1599 uint32_t num_instances; in gfx_v9_4_2_query_utc_edc_count() local
1609 num_instances = in gfx_v9_4_2_query_utc_edc_count()
1611 for (j = 0; j < num_instances; j++) { in gfx_v9_4_2_query_utc_edc_count()
nv.c
456 if ((i == 7 && (adev->sdma.num_instances == 1)) || /* some asics don't have SDMA1 */ in nv_read_register()
soc15.c
1264 for (i = 0; i < adev->sdma.num_instances; i++) { in soc15_doorbell_range_init()
/linux/drivers/media/platform/exynos4-is/
fimc-lite.h
69 unsigned short num_instances; member
fimc-lite.c
1470 if (!drv_data || fimc->index >= drv_data->num_instances || in fimc_lite_probe()
1636 .num_instances = 2,
1647 .num_instances = 3,
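In the Exynos FIMC-LITE driver the count is static per-SoC data: each variant declares how many FIMC-LITE blocks the SoC provides (the .num_instances = 2 and = 3 initializers above), and probe rejects a device whose index falls outside that range. A small sketch of that check, with simplified names:

#include <linux/errno.h>

struct flite_drvdata_sketch {
	unsigned short num_instances;	/* FIMC-LITE blocks present on this SoC */
};

/* Probe-time sanity check: the requested index must name an existing instance. */
static int flite_sketch_check_index(const struct flite_drvdata_sketch *drv_data,
				    unsigned int index)
{
	if (!drv_data || index >= drv_data->num_instances)
		return -EINVAL;
	return 0;
}
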
/linux/include/sound/
timer.h
79 int num_instances; /* current number of timer instances */ member
/linux/drivers/hwmon/
ibmaem.c
191 u8 num_instances; member
203 u8 num_instances; member
519 return ff_resp.num_instances; in aem_find_aem1_count()
655 fi_resp->num_instances <= instance_num) in aem_find_aem2()
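The ibmaem hwmon driver gets its count from the BMC: an IPMI "find firmware instance" response carries num_instances, which the driver both reports as the number of AEM1 meters and uses to validate a requested instance number. A hedged sketch of that validation (the response layout here is illustrative, not the exact IPMI message):

#include <linux/types.h>

/* Illustrative response; the real message carries more fields. */
struct aem_find_resp_sketch {
	u8 num_instances;	/* energy-meter instances the BMC reports */
} __packed;

/* Reject instance numbers the BMC did not report, as aem_find_aem2() does. */
static bool aem_sketch_instance_exists(const struct aem_find_resp_sketch *resp,
				       u8 instance_num)
{
	return instance_num < resp->num_instances;
}
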
/linux/sound/core/
timer.c
223 if (master->timer->num_instances >= master->timer->max_instances) in check_matching_master_slave()
226 master->timer->num_instances++; in check_matching_master_slave()
343 if (timer->num_instances >= timer->max_instances) { in snd_timer_open()
370 timer->num_instances++; in snd_timer_open()
411 timer->num_instances--; in snd_timer_close_locked()
428 timer->num_instances--; in snd_timer_close_locked()
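In the ALSA timer core num_instances is a live counter rather than a hardware property: opening a timer fails once max_instances is reached and otherwise bumps the count, and closing drops it again. A minimal sketch of that accounting, assuming the caller already holds the timer lock and using simplified field names (the -EBUSY return is an idiomatic choice, not taken from the listing):

#include <linux/errno.h>

struct snd_timer_sketch {
	int num_instances;	/* currently open instances */
	int max_instances;	/* administrative limit */
};

/* Called with the timer lock held: admit one more instance or refuse. */
static int timer_sketch_open(struct snd_timer_sketch *timer)
{
	if (timer->num_instances >= timer->max_instances)
		return -EBUSY;
	timer->num_instances++;
	return 0;
}

/* Called with the timer lock held: release one instance. */
static void timer_sketch_close(struct snd_timer_sketch *timer)
{
	timer->num_instances--;
}
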
/linux/drivers/media/platform/ti-vpe/
vpe.c
377 atomic_t num_instances; /* count of driver instances */ member
2372 if (atomic_inc_return(&dev->num_instances) == 1) in vpe_open()
2435 if (atomic_dec_return(&dev->num_instances) == 0) in vpe_release()
2541 atomic_set(&dev->num_instances, 0); in vpe_probe()
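The TI VPE mem2mem driver counts open contexts with an atomic: the first open is the one that brings the hardware path up and the last release tears it down, which is what the atomic_inc_return()/atomic_dec_return() tests above express. A hedged sketch of that first-open/last-close logic (the enable/disable helpers are placeholders, not the driver's real routines):

#include <linux/atomic.h>

struct vpe_dev_sketch {
	atomic_t num_instances;	/* count of open driver instances */
};

static void vpe_sketch_hw_enable(struct vpe_dev_sketch *dev) { /* placeholder */ }
static void vpe_sketch_hw_disable(struct vpe_dev_sketch *dev) { /* placeholder */ }

static void vpe_sketch_open(struct vpe_dev_sketch *dev)
{
	/* Only the very first opener touches the hardware. */
	if (atomic_inc_return(&dev->num_instances) == 1)
		vpe_sketch_hw_enable(dev);
}

static void vpe_sketch_release(struct vpe_dev_sketch *dev)
{
	/* Only the last closer powers things back down. */
	if (atomic_dec_return(&dev->num_instances) == 0)
		vpe_sketch_hw_disable(dev);
}
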
/linux/include/net/bluetooth/
mgmt.h
546 __u8 num_instances; member
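In the Bluetooth management interface the count is wire format: a reply tells userspace how many advertising instances exist, followed by that many one-byte instance identifiers. A hedged sketch of such a variable-length reply and a walker over its tail (layout simplified; see the struct around mgmt.h:546 for the real message):

#include <linux/types.h>

/* Illustrative reply: a count followed by that many instance identifiers. */
struct mgmt_adv_instances_sketch {
	__u8 num_instances;
	__u8 instance[];
} __packed;

/* Visit each advertising-instance identifier carried after the count. */
static void mgmt_sketch_for_each_instance(const struct mgmt_adv_instances_sketch *rp,
					  void (*fn)(__u8 instance))
{
	__u8 i;

	for (i = 0; i < rp->num_instances; i++)
		fn(rp->instance[i]);
}
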
/linux/drivers/gpu/drm/i915/gt/
intel_engine_cs.c
594 u8 class, const u8 *map, u8 num_instances) in populate_logical_ids() argument
599 for (j = 0; j < num_instances; ++j) { in populate_logical_ids()
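i915 uses the argument to turn physical engine instances into contiguous logical IDs: map lists the physical instances of one engine class in the preferred order, num_instances bounds the walk, and engines that are actually present receive consecutive logical numbers. A simplified, self-contained sketch of that mapping, with plain arrays standing in for the driver's engine structures:

#include <linux/types.h>

#define SKETCH_INVALID_LOGICAL_ID	0xff

/*
 * Walk @map (physical instances in preferred order) and hand out consecutive
 * logical ids to the instances that are present; absent ones are marked invalid.
 */
static void sketch_populate_logical_ids(u8 *logical_ids, const bool *present,
					const u8 *map, u8 num_instances)
{
	u8 next_logical = 0;
	u8 j;

	for (j = 0; j < num_instances; j++) {
		u8 phys = map[j];

		logical_ids[phys] = present[phys] ? next_logical++ :
						    SKETCH_INVALID_LOGICAL_ID;
	}
}
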
/linux/drivers/net/ethernet/mellanox/mlx4/
resource_tracker.c
426 int vf, int num_instances) in initialize_res_quotas() argument
428 res_alloc->guaranteed[vf] = num_instances / in initialize_res_quotas()
430 res_alloc->quota[vf] = (num_instances / 2) + res_alloc->guaranteed[vf]; in initialize_res_quotas()
432 res_alloc->res_free = num_instances; in initialize_res_quotas()
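mlx4's SR-IOV resource tracker splits a pool of num_instances resources among functions: each VF gets a guaranteed slice, its quota is half the pool plus that guarantee, and the whole pool starts out free. A hedged sketch of that arithmetic; the divisor for the guaranteed share (twice the number of functions) is an assumption, since the line above is truncated:

struct res_quota_sketch {
	int guaranteed;	/* slice this VF can always claim */
	int quota;	/* hard cap for this VF */
	int free;	/* resources still unclaimed in the shared pool */
};

/* Split a pool of @num_instances resources for one VF among @num_funcs functions. */
static void sketch_init_res_quota(struct res_quota_sketch *q,
				  int num_instances, int num_funcs)
{
	q->guaranteed = num_instances / (2 * num_funcs);
	q->quota = (num_instances / 2) + q->guaranteed;
	q->free = num_instances;
}
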

Completed in 106 milliseconds
