/linux/drivers/gpu/drm/radeon/

radeon_ring.c
  147   mutex_lock(&rdev->ring_lock);  in radeon_ring_lock()
  150   mutex_unlock(&rdev->ring_lock);  in radeon_ring_lock()
  202   mutex_unlock(&rdev->ring_lock);  in radeon_ring_unlock_commit()
  228   mutex_unlock(&rdev->ring_lock);  in radeon_ring_unlock_undo()
  289   mutex_lock(&rdev->ring_lock);  in radeon_ring_backup()
  293   mutex_unlock(&rdev->ring_lock);  in radeon_ring_backup()
  299   mutex_unlock(&rdev->ring_lock);  in radeon_ring_backup()
  310   mutex_unlock(&rdev->ring_lock);  in radeon_ring_backup()
  333   mutex_unlock(&rdev->ring_lock);  in radeon_ring_backup()
  442   mutex_lock(&rdev->ring_lock);  in radeon_ring_fini()
  [all …]

radeon_fence.c
  917   mutex_lock(&rdev->ring_lock);  in radeon_fence_driver_fini()
  931   mutex_unlock(&rdev->ring_lock);  in radeon_fence_driver_fini()

radeon_pm.c
  263   mutex_lock(&rdev->ring_lock);  in radeon_pm_set_clocks()
  274   mutex_unlock(&rdev->ring_lock);  in radeon_pm_set_clocks()
  317   mutex_unlock(&rdev->ring_lock);  in radeon_pm_set_clocks()
  1133  mutex_lock(&rdev->ring_lock);  in radeon_dpm_change_power_state_locked()
  1180  mutex_unlock(&rdev->ring_lock);  in radeon_dpm_change_power_state_locked()
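
Every radeon reference above funnels through rdev->ring_lock as a sleeping mutex: a writer locks the ring, emits its packets, then commits (or undoes) and unlocks, and teardown paths such as radeon_ring_fini() take the same mutex. The sketch below illustrates only that lock/commit shape under hypothetical names (my_device, my_ring_lock, my_ring_unlock_commit); it is not the radeon API.

/*
 * Illustrative sketch of a mutex-serialized command ring, loosely modeled
 * on the radeon_ring_lock()/radeon_ring_unlock_commit() pairing listed
 * above.  All names are hypothetical.
 */
#include <linux/mutex.h>
#include <linux/types.h>

struct my_device {
	struct mutex ring_lock;		/* serializes all ring writers */
	u32 *ring;
	u32 wptr, mask;
};

/* Space accounting elided; assume it can fail when the ring is full. */
static int my_ring_alloc(struct my_device *dev, unsigned int ndw)
{
	return 0;
}

/* Take the ring for this writer; held until the matching commit/undo. */
static int my_ring_lock(struct my_device *dev, unsigned int ndw)
{
	int r;

	mutex_lock(&dev->ring_lock);
	r = my_ring_alloc(dev, ndw);
	if (r) {
		mutex_unlock(&dev->ring_lock);	/* drop it on failure */
		return r;
	}
	return 0;				/* returns with ring_lock held */
}

/* Publish the new write pointer and release the ring. */
static void my_ring_unlock_commit(struct my_device *dev)
{
	/* a doorbell/write-pointer register update would go here */
	mutex_unlock(&dev->ring_lock);
}
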
/linux/drivers/dma/

plx_dma.c
  110   spinlock_t ring_lock;  member
  140   spin_lock_bh(&plxdev->ring_lock);  in plx_dma_process_desc()
  168   spin_unlock_bh(&plxdev->ring_lock);  in plx_dma_process_desc()
  178   spin_lock_bh(&plxdev->ring_lock);  in plx_dma_abort_desc()
  195   spin_unlock_bh(&plxdev->ring_lock);  in plx_dma_abort_desc()
  254   __acquires(plxdev->ring_lock)  in plx_dma_prep_memcpy()
  259   spin_lock_bh(&plxdev->ring_lock);  in plx_dma_prep_memcpy()
  294   __acquire(plxdev->ring_lock);  in plx_dma_prep_memcpy()
  301   __releases(plxdev->ring_lock)  in plx_dma_tx_submit()
  460   spin_lock_bh(&plxdev->ring_lock);  in plx_dma_free_chan_resources()
  [all …]
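
plx_dma protects its descriptor ring with the _bh spinlock variants because completions run from a tasklet, and the __acquires/__releases annotations record the dmaengine convention of taking ring_lock in ->prep() and only releasing it in ->tx_submit(). Below is a minimal sketch of that handoff under assumed my_dma_* types; only the lock movement mirrors the listing.

/*
 * Sketch of a prep/submit lock handoff: ->prep() returns with ring_lock
 * held and ->tx_submit() drops it, so the descriptor cannot be issued or
 * reused in between.  Hypothetical types; not the plx_dma structures.
 */
#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/dmaengine.h>

struct my_dma_dev;

struct my_dma_desc {
	struct dma_async_tx_descriptor txd;
	struct my_dma_dev *dev;
};

struct my_dma_dev {
	spinlock_t ring_lock;		/* protects ring[] and head/tail */
	struct my_dma_desc ring[16];
	unsigned int head, tail;
};

static bool my_dma_ring_full(struct my_dma_dev *dev)
{
	return dev->head - dev->tail >= ARRAY_SIZE(dev->ring);
}

static struct my_dma_desc *my_dma_prep(struct my_dma_dev *dev)
	__acquires(dev->ring_lock)
{
	struct my_dma_desc *desc;

	/* _bh variant: the completion path runs from a tasklet */
	spin_lock_bh(&dev->ring_lock);

	if (my_dma_ring_full(dev)) {
		/* keep sparse's lock count balanced on the early return */
		__acquire(dev->ring_lock);
		spin_unlock_bh(&dev->ring_lock);
		return NULL;
	}

	desc = &dev->ring[dev->head++ % ARRAY_SIZE(dev->ring)];
	desc->dev = dev;
	/* filling in the hardware descriptor is elided */

	return desc;			/* lock stays held until tx_submit() */
}

static dma_cookie_t my_dma_tx_submit(struct dma_async_tx_descriptor *txd)
	__releases(desc->dev->ring_lock)
{
	struct my_dma_desc *desc = container_of(txd, struct my_dma_desc, txd);
	dma_cookie_t cookie = 0;	/* real code would assign a cookie here */

	spin_unlock_bh(&desc->dev->ring_lock);
	return cookie;
}
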
/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_gfx.c
  301   spin_lock_init(&kiq->ring_lock);  in amdgpu_gfx_kiq_init_ring()
  472   spin_lock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_disable_kcq()
  475   spin_unlock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_disable_kcq()
  483   spin_unlock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_disable_kcq()
  528   spin_lock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_enable_kcq()
  534   spin_unlock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_enable_kcq()
  543   spin_unlock(&adev->gfx.kiq.ring_lock);  in amdgpu_gfx_enable_kcq()
  738   spin_lock_irqsave(&kiq->ring_lock, flags);  in amdgpu_kiq_rreg()
  750   spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_kiq_rreg()
  782   spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_kiq_rreg()
  [all …]
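
Most amdgpu entries in this directory share one lock: the KIQ (kernel interrupt queue) ring_lock initialized in amdgpu_gfx_kiq_init_ring() and taken again by the gmc_*, amdgpu_virt and amdkfd files listed further down for TLB flushes, register access and MQD loads. A rough sketch of the shape of amdgpu_kiq_rreg() follows, with hypothetical my_kiq_* helpers standing in for the real ring and fence plumbing.

/*
 * Sketch of a single shared kernel ring guarded by one spinlock: a caller
 * queues a small request under the lock, then waits for the result after
 * dropping it.  Hypothetical helpers; not the amdgpu KIQ interface.
 */
#include <linux/spinlock.h>
#include <linux/types.h>

struct my_kiq {
	spinlock_t ring_lock;	/* one writer on the shared ring at a time */
	/* ring and fence bookkeeping elided */
};

static void my_kiq_emit_read(struct my_kiq *kiq, u32 reg, u64 *seq)
{
	/* write a "read register" packet plus a fence onto the ring */
}

static u32 my_kiq_wait_and_fetch(struct my_kiq *kiq, u64 seq)
{
	/* poll the fence, then pick up the value written back by the GPU */
	return 0;
}

/*
 * Register read by round-tripping through the shared ring; irqsave because
 * this helper may be reached from atomic context.
 */
static u32 my_kiq_rreg(struct my_kiq *kiq, u32 reg)
{
	unsigned long flags;
	u64 seq;

	spin_lock_irqsave(&kiq->ring_lock, flags);
	my_kiq_emit_read(kiq, reg, &seq);
	spin_unlock_irqrestore(&kiq->ring_lock, flags);

	/* wait outside the lock so other users can queue their own packets */
	return my_kiq_wait_and_fetch(kiq, seq);
}
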
amdgpu_ctx.c
  232   spin_lock_init(&ctx->ring_lock);  in amdgpu_ctx_init()
  553   spin_lock(&ctx->ring_lock);  in amdgpu_ctx_add_fence()
  556   spin_unlock(&ctx->ring_lock);  in amdgpu_ctx_add_fence()
  570   spin_lock(&ctx->ring_lock);  in amdgpu_ctx_get_fence()
  576   spin_unlock(&ctx->ring_lock);  in amdgpu_ctx_get_fence()
  582   spin_unlock(&ctx->ring_lock);  in amdgpu_ctx_get_fence()
  587   spin_unlock(&ctx->ring_lock);  in amdgpu_ctx_get_fence()
  646   spin_lock(&ctx->ring_lock);  in amdgpu_ctx_wait_prev_fence()
  649   spin_unlock(&ctx->ring_lock);  in amdgpu_ctx_wait_prev_fence()
  754   spin_lock(&ctx->ring_lock);  in amdgpu_ctx_fence_time()
  [all …]

amdgpu_ctx.h
  47    spinlock_t ring_lock;  member
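
amdgpu_ctx.c uses a different, per-context ring_lock (declared in amdgpu_ctx.h) to guard its per-ring fence bookkeeping: storing the fence of a new submission and looking an old one up must not race. The fragment below sketches such a fence window under assumed names (my_ctx_ring, MY_RING_SIZE); it only approximates what amdgpu_ctx_add_fence() and amdgpu_ctx_get_fence() do.

/*
 * Sketch of a per-context fence window: a fixed-size array of fence
 * pointers indexed by sequence number, with ring_lock making the store
 * and the lookup atomic with respect to each other.  Hypothetical names.
 */
#include <linux/spinlock.h>
#include <linux/dma-fence.h>
#include <linux/types.h>

#define MY_RING_SIZE 256		/* must be a power of two */

struct my_ctx_ring {
	spinlock_t ring_lock;			/* protects fences[] and seq */
	struct dma_fence *fences[MY_RING_SIZE];
	u64 seq;				/* next sequence number */
};

/* Remember the fence of the just-submitted job; returns its seq number. */
static u64 my_ctx_add_fence(struct my_ctx_ring *r, struct dma_fence *fence)
{
	struct dma_fence *old;
	u64 seq;

	spin_lock(&r->ring_lock);
	seq = r->seq++;
	old = r->fences[seq & (MY_RING_SIZE - 1)];
	r->fences[seq & (MY_RING_SIZE - 1)] = dma_fence_get(fence);
	spin_unlock(&r->ring_lock);

	dma_fence_put(old);		/* drop the slot's previous occupant */
	return seq;
}

/* Look up a previously stored fence; NULL if it already left the window. */
static struct dma_fence *my_ctx_get_fence(struct my_ctx_ring *r, u64 seq)
{
	struct dma_fence *fence = NULL;

	spin_lock(&r->ring_lock);
	if (seq < r->seq && seq + MY_RING_SIZE >= r->seq)
		fence = dma_fence_get(r->fences[seq & (MY_RING_SIZE - 1)]);
	spin_unlock(&r->ring_lock);

	return fence;
}
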
amdgpu_virt.c
  74    spin_lock_irqsave(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
  83    spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()
  105   spin_unlock_irqrestore(&kiq->ring_lock, flags);  in amdgpu_virt_kiq_reg_write_reg_wait()

gmc_v10_0.c
  421   spin_lock(&adev->gfx.kiq.ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
  429   spin_unlock(&adev->gfx.kiq.ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()
  434   spin_unlock(&adev->gfx.kiq.ring_lock);  in gmc_v10_0_flush_gpu_tlb_pasid()

amdgpu_amdkfd_gfx_v10.c
  324   spin_lock(&adev->gfx.kiq.ring_lock);  in kgd_hiq_mqd_load()
  351   spin_unlock(&adev->gfx.kiq.ring_lock);  in kgd_hiq_mqd_load()

gmc_v9_0.c
  886   spin_lock(&adev->gfx.kiq.ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
  897   spin_unlock(&adev->gfx.kiq.ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()
  903   spin_unlock(&adev->gfx.kiq.ring_lock);  in gmc_v9_0_flush_gpu_tlb_pasid()

amdgpu_amdkfd_gfx_v9.c
  336   spin_lock(&adev->gfx.kiq.ring_lock);  in kgd_gfx_v9_hiq_mqd_load()
  363   spin_unlock(&adev->gfx.kiq.ring_lock);  in kgd_gfx_v9_hiq_mqd_load()

amdgpu_amdkfd_gfx_v10_3.c
  309   spin_lock(&adev->gfx.kiq.ring_lock);  in hiq_mqd_load_v10_3()
  336   spin_unlock(&adev->gfx.kiq.ring_lock);  in hiq_mqd_load_v10_3()

/linux/virt/kvm/

coalesced_mmio.c
  75    spin_lock(&dev->kvm->ring_lock);  in coalesced_mmio_write()
  80    spin_unlock(&dev->kvm->ring_lock);  in coalesced_mmio_write()
  92    spin_unlock(&dev->kvm->ring_lock);  in coalesced_mmio_write()
  125   spin_lock_init(&kvm->ring_lock);  in kvm_coalesced_mmio_init()
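
In KVM's coalesced MMIO device, several vCPUs may append write records to a ring shared with userspace, so kvm->ring_lock serializes the producers; the early unlock site is the bail-out when the ring is full. A simplified producer might look like the following (hypothetical my_mmio_* types, not the KVM structures):

/*
 * Sketch of a lock-serialized multi-producer ring: fixed-size records are
 * appended under ring_lock; a full ring makes the caller fall back to the
 * slow path.  Hypothetical types.
 */
#include <linux/spinlock.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <linux/types.h>

struct my_mmio_entry {
	u64 phys_addr;
	u32 len;
	u8  data[8];
};

struct my_mmio_ring {
	spinlock_t ring_lock;		/* serializes producers */
	u32 first;			/* consumer index, advanced by the reader */
	u32 last;			/* producer index */
	u32 size;
	struct my_mmio_entry entries[];	/* shared with the consumer */
};

static bool my_ring_has_room(struct my_mmio_ring *ring)
{
	/* keep one slot free so first == last still means "empty" */
	return (ring->last + 1) % ring->size != ring->first;
}

static int my_mmio_write(struct my_mmio_ring *ring, u64 addr,
			 const void *val, u32 len)
{
	spin_lock(&ring->ring_lock);

	if (!my_ring_has_room(ring) || len > sizeof(ring->entries[0].data)) {
		spin_unlock(&ring->ring_lock);	/* early exit when full */
		return -EOPNOTSUPP;		/* caller takes the slow path */
	}

	ring->entries[ring->last].phys_addr = addr;
	ring->entries[ring->last].len = len;
	memcpy(ring->entries[ring->last].data, val, len);
	/* a lockless consumer would also need a write barrier before this */
	ring->last = (ring->last + 1) % ring->size;

	spin_unlock(&ring->ring_lock);
	return 0;
}
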
/linux/drivers/hv/

ring_buffer.c
  265   spin_lock_init(&ring_info->ring_lock);  in hv_ringbuffer_init()
  305   spin_lock_irqsave(&outring_info->ring_lock, flags);  in hv_ringbuffer_write()
  322   spin_unlock_irqrestore(&outring_info->ring_lock, flags);  in hv_ringbuffer_write()
  350   spin_unlock_irqrestore(&outring_info->ring_lock, flags);  in hv_ringbuffer_write()
  373   spin_unlock_irqrestore(&outring_info->ring_lock, flags);  in hv_ringbuffer_write()
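
hv_ringbuffer_write() is the only writer path shown here, and it holds ring_lock with interrupts disabled while it checks free space and copies the payload into the host-visible ring; the multiple unlock sites are early exits. A stripped-down sketch of such a write path, with an assumed my_out_ring layout rather than the real Hyper-V ring structures:

/*
 * Sketch of an outbound shared-ring writer: check space, copy with
 * wrap-around, advance the write index, all under ring_lock with
 * interrupts off.  Hypothetical layout.
 */
#include <linux/spinlock.h>
#include <linux/errno.h>
#include <linux/types.h>

struct my_out_ring {
	spinlock_t ring_lock;	/* serializes writers on this channel */
	u32 write_index;
	u32 read_index;		/* advanced by the other endpoint */
	u32 size;
	u8  *data;		/* shared with the other endpoint */
};

static u32 my_ring_avail(const struct my_out_ring *r)
{
	u32 used = (r->write_index >= r->read_index) ?
			r->write_index - r->read_index :
			r->size - (r->read_index - r->write_index);

	return r->size - used - 1;	/* keep one byte free */
}

static int my_ring_write(struct my_out_ring *r, const void *buf, u32 len)
{
	unsigned long flags;
	u32 i;

	spin_lock_irqsave(&r->ring_lock, flags);

	if (my_ring_avail(r) < len) {
		spin_unlock_irqrestore(&r->ring_lock, flags);
		return -EAGAIN;		/* early-exit path when too full */
	}

	for (i = 0; i < len; i++) {	/* byte copy handles the wrap-around */
		r->data[r->write_index] = ((const u8 *)buf)[i];
		r->write_index = (r->write_index + 1) % r->size;
	}

	spin_unlock_irqrestore(&r->ring_lock, flags);
	return 0;
}
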
/linux/drivers/block/

xen-blkfront.c
  175   spinlock_t ring_lock;  member
  901   spin_lock_irqsave(&rinfo->ring_lock, flags);  in blkif_queue_rq()
  912   spin_unlock_irqrestore(&rinfo->ring_lock, flags);  in blkif_queue_rq()
  916   spin_unlock_irqrestore(&rinfo->ring_lock, flags);  in blkif_queue_rq()
  921   spin_unlock_irqrestore(&rinfo->ring_lock, flags);  in blkif_queue_rq()
  1189  spin_lock_irqsave(&rinfo->ring_lock, flags);  in kick_pending_request_queues()
  1191  spin_unlock_irqrestore(&rinfo->ring_lock, flags);  in kick_pending_request_queues()
  1522  spin_lock_irqsave(&rinfo->ring_lock, flags);  in blkif_interrupt()
  1652  spin_unlock_irqrestore(&rinfo->ring_lock, flags);  in blkif_interrupt()
  1918  spin_lock_init(&rinfo->ring_lock);  in negotiate_mq()
  [all …]
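
xen-blkfront keeps one ring_lock per blk-mq hardware queue; ->queue_rq() takes it with irqsave because blkif_interrupt() for the same ring takes it too. The sketch below shows where such a per-ring lock sits in a ->queue_rq() implementation; the my_* types are placeholders and only the lock placement mirrors the listing.

/*
 * Sketch of a per-hardware-queue ring lock in a blk-mq ->queue_rq():
 * irqsave because the completion interrupt for the same ring also takes
 * the lock.  Hypothetical driver types.
 */
#include <linux/spinlock.h>
#include <linux/blk-mq.h>

struct my_ring_info {
	spinlock_t ring_lock;		/* protects the shared request ring */
	unsigned int free_slots;
};

static bool my_ring_full(struct my_ring_info *rinfo)
{
	return rinfo->free_slots == 0;
}

static int my_queue_request(struct my_ring_info *rinfo, struct request *rq)
{
	/* building and publishing a ring entry is elided */
	rinfo->free_slots--;
	return 0;
}

static blk_status_t my_queue_rq(struct blk_mq_hw_ctx *hctx,
				const struct blk_mq_queue_data *qd)
{
	struct my_ring_info *rinfo = hctx->driver_data;
	unsigned long flags;

	blk_mq_start_request(qd->rq);

	spin_lock_irqsave(&rinfo->ring_lock, flags);
	if (my_ring_full(rinfo)) {
		spin_unlock_irqrestore(&rinfo->ring_lock, flags);
		return BLK_STS_DEV_RESOURCE;	/* retried when slots free up */
	}
	if (my_queue_request(rinfo, qd->rq)) {
		spin_unlock_irqrestore(&rinfo->ring_lock, flags);
		return BLK_STS_IOERR;
	}
	spin_unlock_irqrestore(&rinfo->ring_lock, flags);

	return BLK_STS_OK;
}
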
/linux/drivers/net/hamradio/

dmascc.c
  205   spinlock_t ring_lock;  member
  557   spin_lock_init(&priv->ring_lock);  in setup_adapter()
  928   spin_lock_irqsave(&priv->ring_lock, flags);  in scc_send_packet()
  950   spin_unlock_irqrestore(&priv->ring_lock, flags);  in scc_send_packet()
  1254  spin_lock_irqsave(&priv->ring_lock, flags);  in rx_bh()
  1256  spin_unlock_irqrestore(&priv->ring_lock, flags);  in rx_bh()
  1273  spin_lock_irqsave(&priv->ring_lock, flags);  in rx_bh()
  1278  spin_unlock_irqrestore(&priv->ring_lock, flags);  in rx_bh()

/linux/drivers/net/ethernet/amd/

ni65.c
  245   spinlock_t ring_lock;  member
  456   spin_lock_init(&p->ring_lock);  in ni65_probe1()
  880   spin_lock(&p->ring_lock);  in ni65_interrupt()
  972   spin_unlock(&p->ring_lock);  in ni65_interrupt()
  1182  spin_lock_irqsave(&p->ring_lock, flags);  in ni65_send_packet()
  1190  spin_lock_irqsave(&p->ring_lock, flags);  in ni65_send_packet()
  1210  spin_unlock_irqrestore(&p->ring_lock, flags);  in ni65_send_packet()
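
The two network drivers above (dmascc and ni65) follow the classic split: the transmit path takes ring_lock with the irqsave variant, while the interrupt handler, which already runs in hardirq context, uses the plain spin_lock on the same lock. A generic sketch of that pattern with hypothetical my_* names, not the code of either driver:

/*
 * Sketch of a TX ring shared between ->ndo_start_xmit() and the interrupt
 * handler: irqsave in the xmit path, plain spin_lock in the handler.
 */
#include <linux/spinlock.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/interrupt.h>

#define MY_TX_RING 16

struct my_priv {
	spinlock_t ring_lock;		/* protects the tx ring indices */
	struct sk_buff *tx_skb[MY_TX_RING];
	unsigned int tx_head, tx_tail;
};

static netdev_tx_t my_start_xmit(struct sk_buff *skb, struct net_device *dev)
{
	struct my_priv *p = netdev_priv(dev);
	unsigned long flags;

	spin_lock_irqsave(&p->ring_lock, flags);
	if (p->tx_head - p->tx_tail >= MY_TX_RING) {
		netif_stop_queue(dev);
		spin_unlock_irqrestore(&p->ring_lock, flags);
		return NETDEV_TX_BUSY;
	}
	p->tx_skb[p->tx_head % MY_TX_RING] = skb;
	p->tx_head++;
	/* kicking the hardware is elided */
	spin_unlock_irqrestore(&p->ring_lock, flags);

	return NETDEV_TX_OK;
}

static irqreturn_t my_interrupt(int irq, void *dev_id)
{
	struct net_device *dev = dev_id;
	struct my_priv *p = netdev_priv(dev);

	spin_lock(&p->ring_lock);	/* plain variant inside the handler */
	while (p->tx_tail != p->tx_head /* && descriptor completed */) {
		dev_kfree_skb_irq(p->tx_skb[p->tx_tail % MY_TX_RING]);
		p->tx_tail++;
	}
	netif_wake_queue(dev);
	spin_unlock(&p->ring_lock);

	return IRQ_HANDLED;
}
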
/linux/fs/

aio.c
  153   struct mutex ring_lock;  member
  406   if (!mutex_trylock(&ctx->ring_lock)) {  in aio_migratepage()
  446   mutex_unlock(&ctx->ring_lock);  in aio_migratepage()
  741   mutex_init(&ctx->ring_lock);  in ioctx_alloc()
  744   mutex_lock(&ctx->ring_lock);  in ioctx_alloc()
  787   mutex_unlock(&ctx->ring_lock);  in ioctx_alloc()
  801   mutex_unlock(&ctx->ring_lock);  in ioctx_alloc()
  1187  mutex_lock(&ctx->ring_lock);  in aio_read_events_ring()
  1247  mutex_unlock(&ctx->ring_lock);  in aio_read_events_ring()
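
fs/aio.c makes ring_lock a mutex because the code that holds it may sleep, and the page-migration path uses mutex_trylock() so it can back off instead of blocking. A small sketch of that blocking/opportunistic split, with a hypothetical my_ioctx rather than the real kioctx:

/*
 * Sketch of a mutex with one ordinary (sleeping) user and one trylock
 * user that prefers to retry later rather than wait.
 */
#include <linux/mutex.h>
#include <linux/errno.h>

struct my_ioctx {
	struct mutex ring_lock;		/* serializes ring readers and remapping */
	/* ring pages and head/tail indices elided */
};

/* Ordinary consumer: may sleep, so a plain mutex_lock() is fine. */
static int my_read_events(struct my_ioctx *ctx)
{
	mutex_lock(&ctx->ring_lock);
	/* copy completed events out to the caller */
	mutex_unlock(&ctx->ring_lock);
	return 0;
}

/* Opportunistic path: give up rather than wait if a reader is active. */
static int my_migrate_ring_page(struct my_ioctx *ctx)
{
	if (!mutex_trylock(&ctx->ring_lock))
		return -EAGAIN;		/* retried later by the caller */

	/* swap the backing page and fix up the mapping here */

	mutex_unlock(&ctx->ring_lock);
	return 0;
}
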
/linux/drivers/net/wireless/ath/carl9170/

debug.h
  125   struct mutex ring_lock;  member

/linux/drivers/scsi/lpfc/

lpfc_nvmet.c
  1106  spin_lock_irqsave(&pring->ring_lock, iflags);  in lpfc_nvmet_xmt_fcp_op()
  1109  spin_unlock_irqrestore(&pring->ring_lock, iflags);  in lpfc_nvmet_xmt_fcp_op()
  2021  spin_lock_irqsave(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_flush()
  2028  spin_unlock_irqrestore(&pring->ring_lock,  in lpfc_nvmet_wqfull_flush()
  2038  spin_unlock_irqrestore(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_flush()
  2040  spin_lock_irqsave(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_flush()
  2045  spin_unlock_irqrestore(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_flush()
  2064  spin_lock_irqsave(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_process()
  2068  spin_unlock_irqrestore(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_process()
  2071  spin_lock_irqsave(&pring->ring_lock, iflags);  in lpfc_nvmet_wqfull_process()
  [all …]

lpfc_sli.c
  3558   temp_lock = &pring->ring_lock;  in lpfc_sli_iocbq_lookup()
  3607   temp_lock = &pring->ring_lock;  in lpfc_sli_iocbq_lookup_by_tag()
  4480   spin_lock_irq(&pring->ring_lock);  in lpfc_sli_abort_iocb_ring()
  4571   spin_lock_irq(&pring->ring_lock);  in lpfc_sli_flush_io_rings()
  11704  spin_lock_init(&pring->ring_lock);  in lpfc_sli4_queue_init()
  11713  spin_lock_init(&pring->ring_lock);  in lpfc_sli4_queue_init()
  11897  spin_lock(&pring->ring_lock);  in lpfc_sli_host_down()
  11904  spin_unlock(&pring->ring_lock);  in lpfc_sli_host_down()
  11984  spin_lock(&pring->ring_lock);  in lpfc_sli_hba_down()
  11986  spin_unlock(&pring->ring_lock);  in lpfc_sli_hba_down()
  [all …]

lpfc_sli.h
  245   spinlock_t ring_lock;  /* lock for issuing commands */  member
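
For lpfc, each SLI ring's ring_lock ("lock for issuing commands", per lpfc_sli.h) guards that ring's command queues while commands are issued, aborted or flushed; the temp_lock assignments in lpfc_sli.c show the lookup helpers selecting which lock to take by pointer. Flush and abort paths like the ones listed typically detach the pending queue under the lock and complete the entries after dropping it, roughly as sketched below with hypothetical my_ring/my_cmd types.

/*
 * Sketch of a splice-then-complete flush: under ring_lock, move the
 * pending command list onto a private list, then finish each entry after
 * the lock is dropped so completion handling never runs under it.
 */
#include <linux/spinlock.h>
#include <linux/list.h>

struct my_ring {
	spinlock_t ring_lock;	/* lock for issuing commands */
	struct list_head txq;	/* commands queued but not yet issued */
};

struct my_cmd {
	struct list_head list;
};

static void my_complete_cmd(struct my_cmd *cmd, int status)
{
	/* per-command completion handling elided */
}

static void my_ring_flush_txq(struct my_ring *pring, int status)
{
	struct my_cmd *cmd, *next;
	unsigned long iflags;
	LIST_HEAD(completions);

	spin_lock_irqsave(&pring->ring_lock, iflags);
	list_splice_init(&pring->txq, &completions);	/* detach under the lock */
	spin_unlock_irqrestore(&pring->ring_lock, iflags);

	/* run completions with the ring lock released */
	list_for_each_entry_safe(cmd, next, &completions, list) {
		list_del_init(&cmd->list);
		my_complete_cmd(cmd, status);
	}
}
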
/linux/include/linux/

hyperv.h
  175   spinlock_t ring_lock;  member
  1080  spin_lock_irqsave(&c->outbound.ring_lock, flags);  in set_channel_pending_send_size()
  1087  spin_unlock_irqrestore(&c->outbound.ring_lock, flags);  in set_channel_pending_send_size()
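
include/linux/hyperv.h declares the ring_lock member itself and adds a small inline helper that updates one field of the outbound ring under the same lock the write path in ring_buffer.c uses, so the field cannot change in the middle of a write. The equivalent shape, with an assumed my_channel layout rather than the real vmbus structures:

/*
 * Sketch of a header-side inline setter that reuses the ring's lock for a
 * single field update.  Hypothetical layout.
 */
#include <linux/spinlock.h>
#include <linux/types.h>

struct my_ring_info {
	spinlock_t ring_lock;
	u32 pending_send_sz;	/* threshold consulted by the other endpoint */
};

struct my_channel {
	struct my_ring_info outbound;
};

static inline void my_set_pending_send_size(struct my_channel *c, u32 size)
{
	unsigned long flags;

	spin_lock_irqsave(&c->outbound.ring_lock, flags);
	c->outbound.pending_send_sz = size;
	spin_unlock_irqrestore(&c->outbound.ring_lock, flags);
}
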
/linux/drivers/xen/

xen-scsiback.c
  93    spinlock_t ring_lock;  member
  293   spin_lock_irqsave(&info->ring_lock, flags);  in scsiback_send_response()
  315   spin_unlock_irqrestore(&info->ring_lock, flags);  in scsiback_send_response()
  1222  spin_lock_init(&info->ring_lock);  in scsiback_probe()
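
xen-scsiback takes info->ring_lock around producing a response on the ring shared with the frontend, since I/O completions can arrive concurrently. A reduced sketch of such a response path (hypothetical my_backend layout, not the Xen shared-ring macros):

/*
 * Sketch of a backend response producer: serialize writers of the shared
 * response ring, then notify the frontend outside the lock.
 */
#include <linux/spinlock.h>
#include <linux/types.h>

struct my_response {
	u16 rqid;
	s16 result;
};

struct my_backend {
	spinlock_t ring_lock;		/* serializes response producers */
	struct my_response *ring;	/* shared with the frontend */
	u32 rsp_prod, ring_size;
};

static void my_notify_frontend(struct my_backend *info)
{
	/* event-channel kick elided */
}

static void my_send_response(struct my_backend *info, u16 rqid, s16 result)
{
	unsigned long flags;
	bool notify;

	spin_lock_irqsave(&info->ring_lock, flags);

	info->ring[info->rsp_prod % info->ring_size].rqid = rqid;
	info->ring[info->rsp_prod % info->ring_size].result = result;
	info->rsp_prod++;
	notify = true;		/* real code consults the frontend's event index */

	spin_unlock_irqrestore(&info->ring_lock, flags);

	if (notify)
		my_notify_frontend(info);
}
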