
Searched refs:ww (Results 1 – 25 of 76) sorted by relevance


/linux/drivers/gpu/drm/i915/
i915_gem_ww.c
12 INIT_LIST_HEAD(&ww->obj_list); in i915_gem_ww_ctx_init()
13 ww->intr = intr; in i915_gem_ww_ctx_init()
14 ww->contended = NULL; in i915_gem_ww_ctx_init()
38 WARN_ON(ww->contended); in i915_gem_ww_ctx_fini()
39 ww_acquire_fini(&ww->ctx); in i915_gem_ww_ctx_fini()
46 if (WARN_ON(!ww->contended)) in i915_gem_ww_ctx_backoff()
50 if (ww->intr) in i915_gem_ww_ctx_backoff()
51 ret = dma_resv_lock_slow_interruptible(ww->contended->base.resv, &ww->ctx); in i915_gem_ww_ctx_backoff()
53 dma_resv_lock_slow(ww->contended->base.resv, &ww->ctx); in i915_gem_ww_ctx_backoff()
56 list_add_tail(&ww->contended->obj_link, &ww->obj_list); in i915_gem_ww_ctx_backoff()
[all …]
i915_vma.h
250 i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
256 struct i915_gem_ww_ctx ww; in i915_vma_pin() local
259 i915_gem_ww_ctx_init(&ww, true); in i915_vma_pin()
261 err = i915_gem_object_lock(vma->obj, &ww); in i915_vma_pin()
263 err = i915_vma_pin_ww(vma, &ww, size, alignment, flags); in i915_vma_pin()
265 err = i915_gem_ww_ctx_backoff(&ww); in i915_vma_pin()
269 i915_gem_ww_ctx_fini(&ww); in i915_vma_pin()
274 int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww,
i915_gem_ww.h
23 static inline int __i915_gem_ww_fini(struct i915_gem_ww_ctx *ww, int err) in __i915_gem_ww_fini() argument
26 err = i915_gem_ww_ctx_backoff(ww); in __i915_gem_ww_fini()
32 i915_gem_ww_ctx_fini(ww); in __i915_gem_ww_fini()
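
The i915 hits above all follow the same shape: a struct i915_gem_ww_ctx is initialised, objects are locked against it, and on -EDEADLK the context backs off (dropping the locks taken so far and sleeping on the contended object) before the whole sequence is retried. A minimal sketch of that retry loop, pieced together from the i915_vma_pin() and i915_gem_ww.c lines shown above; lock_and_pin_sketch() is a hypothetical wrapper name, not a function in the driver:

static int lock_and_pin_sketch(struct i915_vma *vma, u64 size,
                               u64 alignment, u64 flags)
{
        struct i915_gem_ww_ctx ww;
        int err;

        i915_gem_ww_ctx_init(&ww, true);        /* true: interruptible waits */
retry:
        err = i915_gem_object_lock(vma->obj, &ww);
        if (!err)
                err = i915_vma_pin_ww(vma, &ww, size, alignment, flags);
        if (err == -EDEADLK) {
                /* drop everything, sleep on the contended object, then retry */
                err = i915_gem_ww_ctx_backoff(&ww);
                if (!err)
                        goto retry;
        }
        i915_gem_ww_ctx_fini(&ww);
        return err;
}
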
/linux/drivers/staging/vt6655/
mac.c
258 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSoftwareReset()
323 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeRxOff()
331 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeRxOff()
343 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeRxOff()
378 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeTxOff()
386 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeTxOff()
399 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACbSafeTxOff()
528 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACvSetCurrRx0DescAddr()
562 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACvSetCurrRx1DescAddr()
597 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in MACvSetCurrTx0DescAddrEx()
[all …]
baseband.c
1909 unsigned short ww; in bb_read_embedded() local
1918 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in bb_read_embedded()
1927 if (ww == W_MAX_TIMEOUT) { in bb_read_embedded()
1952 unsigned short ww; in bb_write_embedded() local
1963 for (ww = 0; ww < W_MAX_TIMEOUT; ww++) { in bb_write_embedded()
1969 if (ww == W_MAX_TIMEOUT) { in bb_write_embedded()
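
In the vt6655 hits, ww is nothing more than a bounded-polling loop counter: the driver reads a status register up to W_MAX_TIMEOUT times and treats reaching the limit as a timeout (the ww == W_MAX_TIMEOUT check in bb_read_embedded()/bb_write_embedded()). A sketch of that idiom; poll_ready(), REG_STATUS and BIT_READY are placeholder names, not the driver's actual ones:

static int poll_ready(void __iomem *iobase)
{
        unsigned short ww;
        u8 status;

        for (ww = 0; ww < W_MAX_TIMEOUT; ww++) {
                status = ioread8(iobase + REG_STATUS); /* placeholder register */
                if (status & BIT_READY)                /* placeholder ready bit */
                        break;
        }
        if (ww == W_MAX_TIMEOUT)
                return -ETIMEDOUT;      /* the bit never came up */

        return 0;
}
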
/linux/drivers/gpu/drm/i915/gem/selftests/
i915_gem_migrate.c
45 struct i915_gem_ww_ctx ww; in igt_create_migrate() local
55 for_i915_gem_ww(&ww, err, true) { in igt_create_migrate()
56 err = i915_gem_object_lock(obj, &ww); in igt_create_migrate()
64 err = i915_gem_object_migrate(obj, &ww, dst); in igt_create_migrate()
107 err = i915_gem_object_lock(obj, ww); in lmem_pages_migrate_one()
156 struct i915_gem_ww_ctx ww; in igt_lmem_pages_migrate() local
168 for_i915_gem_ww(&ww, err, true) { in igt_lmem_pages_migrate()
169 err = i915_gem_object_lock(obj, &ww); in igt_lmem_pages_migrate()
177 err = intel_migrate_clear(&gt->migrate, &ww, NULL, in igt_lmem_pages_migrate()
205 for_i915_gem_ww(&ww, err, true) in igt_lmem_pages_migrate()
[all …]
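
The selftest above uses the for_i915_gem_ww() helper from i915_gem_ww.h instead of open-coding the retry: the macro initialises the context, runs the body, and re-runs it for as long as the body leaves err == -EDEADLK, with __i915_gem_ww_fini() doing the backoff or the final fini between passes. A minimal sketch of that form, keeping the snippet's obj/dst arguments and eliding the rest of the test; migrate_locked_sketch() is a hypothetical wrapper name:

static int migrate_locked_sketch(struct drm_i915_gem_object *obj,
                                 enum intel_region_id dst)
{
        struct i915_gem_ww_ctx ww;
        int err;

        for_i915_gem_ww(&ww, err, true) {
                err = i915_gem_object_lock(obj, &ww);
                if (err)
                        continue;       /* -EDEADLK re-runs the body; other errors end the loop */

                err = i915_gem_object_migrate(obj, &ww, dst);
        }

        return err;
}
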
/linux/drivers/gpu/drm/i915/gt/
intel_context.c
136 struct i915_gem_ww_ctx *ww) in __ring_active() argument
140 err = intel_ring_pin(ring, ww); in __ring_active()
162 struct i915_gem_ww_ctx *ww) in intel_context_pre_pin() argument
168 err = __ring_active(ce->ring, ww); in intel_context_pre_pin()
203 struct i915_gem_ww_ctx *ww) in __intel_context_do_pin_ww() argument
297 struct i915_gem_ww_ctx ww; in __intel_context_do_pin() local
300 i915_gem_ww_ctx_init(&ww, true); in __intel_context_do_pin()
308 i915_gem_ww_ctx_fini(&ww); in __intel_context_do_pin()
494 struct i915_gem_ww_ctx ww; in intel_context_create_request() local
498 i915_gem_ww_ctx_init(&ww, true); in intel_context_create_request()
[all …]
selftest_migrate.c
34 struct i915_gem_ww_ctx *ww, in copy() argument
43 struct i915_gem_ww_ctx ww; in copy() local
56 for_i915_gem_ww(&ww, err, true) { in copy()
57 err = i915_gem_object_lock(src, &ww); in copy()
134 struct i915_gem_ww_ctx *ww, in clear() argument
143 struct i915_gem_ww_ctx ww; in clear() local
152 for_i915_gem_ww(&ww, err, true) { in clear()
209 struct i915_gem_ww_ctx *ww, in __migrate_copy() argument
223 struct i915_gem_ww_ctx *ww, in __global_copy() argument
249 struct i915_gem_ww_ctx *ww, in __migrate_clear() argument
[all …]
intel_renderstate.c
165 i915_gem_ww_ctx_init(&so->ww, true); in intel_renderstate_init()
167 err = intel_context_pin_ww(ce, &so->ww); in intel_renderstate_init()
175 err = i915_gem_object_lock(so->vma->obj, &so->ww); in intel_renderstate_init()
179 err = i915_vma_pin_ww(so->vma, &so->ww, 0, 0, PIN_GLOBAL | PIN_HIGH); in intel_renderstate_init()
195 err = i915_gem_ww_ctx_backoff(&so->ww); in intel_renderstate_init()
199 i915_gem_ww_ctx_fini(&so->ww); in intel_renderstate_init()
248 i915_gem_ww_ctx_fini(&so->ww); in intel_renderstate_fini()
intel_ring_submission.c
458 struct i915_gem_ww_ctx *ww) in ring_context_init_default_state() argument
478 struct i915_gem_ww_ctx *ww, in ring_context_pre_pin() argument
486 err = ring_context_init_default_state(ce, ww); in ring_context_pre_pin()
493 err = gen6_ppgtt_pin(i915_vm_to_ppgtt((vm)), ww); in ring_context_pre_pin()
1239 struct i915_gem_ww_ctx *ww, in gen7_ctx_switch_bb_init() argument
1304 struct i915_gem_ww_ctx ww; in intel_ring_submission_setup() local
1354 i915_gem_ww_ctx_init(&ww, false); in intel_ring_submission_setup()
1363 err = intel_timeline_pin(timeline, &ww); in intel_ring_submission_setup()
1365 err = intel_ring_pin(ring, &ww); in intel_ring_submission_setup()
1384 err = i915_gem_ww_ctx_backoff(&ww); in intel_ring_submission_setup()
[all …]
intel_migrate.h
25 struct i915_gem_ww_ctx *ww,
47 struct i915_gem_ww_ctx *ww,
/linux/drivers/gpu/drm/i915/gem/
i915_gem_dmabuf.c
123 struct i915_gem_ww_ctx ww; in i915_gem_begin_cpu_access() local
126 i915_gem_ww_ctx_init(&ww, true); in i915_gem_begin_cpu_access()
128 err = i915_gem_object_lock(obj, &ww); in i915_gem_begin_cpu_access()
136 err = i915_gem_ww_ctx_backoff(&ww); in i915_gem_begin_cpu_access()
140 i915_gem_ww_ctx_fini(&ww); in i915_gem_begin_cpu_access()
147 struct i915_gem_ww_ctx ww; in i915_gem_end_cpu_access() local
150 i915_gem_ww_ctx_init(&ww, true); in i915_gem_end_cpu_access()
160 err = i915_gem_ww_ctx_backoff(&ww); in i915_gem_end_cpu_access()
164 i915_gem_ww_ctx_fini(&ww); in i915_gem_end_cpu_access()
172 struct i915_gem_ww_ctx ww; in i915_gem_dmabuf_attach() local
[all …]
i915_gem_object.h
176 struct i915_gem_ww_ctx *ww, in __i915_gem_object_lock() argument
182 ret = dma_resv_lock_interruptible(obj->base.resv, ww ? &ww->ctx : NULL); in __i915_gem_object_lock()
184 ret = dma_resv_lock(obj->base.resv, ww ? &ww->ctx : NULL); in __i915_gem_object_lock()
186 if (!ret && ww) { in __i915_gem_object_lock()
195 ww->contended = obj; in __i915_gem_object_lock()
202 struct i915_gem_ww_ctx *ww) in i915_gem_object_lock() argument
204 return __i915_gem_object_lock(obj, ww, ww && ww->intr); in i915_gem_object_lock()
208 struct i915_gem_ww_ctx *ww) in i915_gem_object_lock_interruptible() argument
210 WARN_ON(ww && !ww->intr); in i915_gem_object_lock_interruptible()
529 struct i915_gem_ww_ctx *ww,
[all …]
i915_gem_region.c
110 GEM_WARN_ON(apply->ww); in i915_gem_process_region()
115 struct i915_gem_ww_ctx ww; in i915_gem_process_region() local
132 apply->ww = &ww; in i915_gem_process_region()
133 for_i915_gem_ww(&ww, ret, apply->interruptible) { in i915_gem_process_region()
134 ret = i915_gem_object_lock(obj, apply->ww); in i915_gem_process_region()
i915_gem_mman.c
299 struct i915_gem_ww_ctx ww; in vm_fault_gtt() local
313 i915_gem_ww_ctx_init(&ww, true); in vm_fault_gtt()
315 ret = i915_gem_object_lock(obj, &ww); in vm_fault_gtt()
334 vma = i915_gem_object_ggtt_pin_ww(obj, &ww, NULL, 0, 0, in vm_fault_gtt()
418 ret = i915_gem_ww_ctx_backoff(&ww); in vm_fault_gtt()
422 i915_gem_ww_ctx_fini(&ww); in vm_fault_gtt()
433 struct i915_gem_ww_ctx ww; in vm_access() local
444 i915_gem_ww_ctx_init(&ww, true); in vm_access()
446 err = i915_gem_object_lock(obj, &ww); in vm_access()
467 err = i915_gem_ww_ctx_backoff(&ww); in vm_access()
[all …]
/linux/kernel/locking/
ww_mutex.h
190 DEBUG_LOCKS_WARN_ON(ww->ctx); in ww_mutex_lock_acquired()
202 DEBUG_LOCKS_WARN_ON(ww_ctx->contending_lock != ww); in ww_mutex_lock_acquired()
215 DEBUG_LOCKS_WARN_ON(ww_ctx->ww_class != ww->ww_class); in ww_mutex_lock_acquired()
218 ww->ctx = ww_ctx; in ww_mutex_lock_acquired()
417 struct ww_mutex *ww; in __ww_mutex_kill() local
419 ww = container_of(lock, struct ww_mutex, base); in __ww_mutex_kill()
421 ww_ctx->contending_lock = ww; in __ww_mutex_kill()
444 struct ww_mutex *ww = container_of(lock, struct ww_mutex, base); in __ww_mutex_check_kill() local
445 struct ww_acquire_ctx *hold_ctx = READ_ONCE(ww->ctx); in __ww_mutex_check_kill()
545 struct ww_mutex *ww = container_of(lock, struct ww_mutex, base); in __ww_mutex_add_waiter() local
[all …]
mutex.c
304 struct ww_mutex *ww; in ww_mutex_spin_on_owner() local
306 ww = container_of(lock, struct ww_mutex, base); in ww_mutex_spin_on_owner()
319 if (ww_ctx->acquired > 0 && READ_ONCE(ww->ctx)) in ww_mutex_spin_on_owner()
578 struct ww_mutex *ww; in __mutex_lock_common() local
590 if (unlikely(ww_ctx == READ_ONCE(ww->ctx))) in __mutex_lock_common()
614 ww_mutex_set_context_fastpath(ww, ww_ctx); in __mutex_lock_common()
719 ww_mutex_lock_acquired(ww, ww_ctx); in __mutex_lock_common()
766 return mutex_trylock(&ww->base); in ww_mutex_trylock()
768 MUTEX_WARN_ON(ww->base.magic != &ww->base); in ww_mutex_trylock()
778 if (__mutex_trylock(&ww->base)) { in ww_mutex_trylock()
[all …]
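
In kernel/locking/, ww is the struct ww_mutex itself: ww_mutex.h and mutex.c implement the wound/wait machinery (wounding the younger context, returning -EDEADLK, re-acquiring via the slow path) that the i915 helpers above wrap. For reference, a sketch of how a caller uses the generic API to lock an arbitrary set of mutexes of one class, following the pattern described in Documentation/locking/ww-mutex-design.rst; demo_ww_class, struct demo_obj, lock_all() and unlock_all() are hypothetical names, not code from these files:

#include <linux/list.h>
#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(demo_ww_class);

struct demo_obj {
        struct ww_mutex lock;   /* assumed initialised with ww_mutex_init(&lock, &demo_ww_class) */
        struct list_head node;  /* on the "held" list while locked */
};

static void unlock_all(struct list_head *held)
{
        struct demo_obj *obj, *tmp;

        list_for_each_entry_safe(obj, tmp, held, node) {
                list_del(&obj->node);
                ww_mutex_unlock(&obj->lock);
        }
}

/* Lock objs[0..n), backing off and retrying whenever ww_mutex_lock() says -EDEADLK. */
static int lock_all(struct demo_obj **objs, int n, struct list_head *held,
                    struct ww_acquire_ctx *ctx)
{
        struct demo_obj *contended = NULL;
        int i, err;

        ww_acquire_init(ctx, &demo_ww_class);
retry:
        for (i = 0; i < n; i++) {
                if (objs[i] == contended) {
                        /* already taken via ww_mutex_lock_slow() below */
                        list_add_tail(&objs[i]->node, held);
                        contended = NULL;
                        continue;
                }

                err = ww_mutex_lock(&objs[i]->lock, ctx);
                if (err) {
                        unlock_all(held);
                        if (contended) {
                                ww_mutex_unlock(&contended->lock);
                                contended = NULL;
                        }
                        if (err == -EDEADLK) {
                                /* wounded by an older context: wait for the lock we lost, then retry */
                                ww_mutex_lock_slow(&objs[i]->lock, ctx);
                                contended = objs[i];
                                goto retry;
                        }
                        ww_acquire_fini(ctx);
                        return err;
                }
                list_add_tail(&objs[i]->node, held);
        }

        ww_acquire_done(ctx);   /* caller later unlock_all()s and calls ww_acquire_fini() */
        return 0;
}
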
/linux/drivers/gpu/drm/i915/selftests/
i915_gem.c
207 struct i915_gem_ww_ctx ww; in igt_gem_ww_ctx() local
220 i915_gem_ww_ctx_init(&ww, true); in igt_gem_ww_ctx()
223 err = i915_gem_object_lock(obj, &ww); in igt_gem_ww_ctx()
225 err = i915_gem_object_lock_interruptible(obj, &ww); in igt_gem_ww_ctx()
227 err = i915_gem_object_lock_interruptible(obj2, &ww); in igt_gem_ww_ctx()
229 err = i915_gem_object_lock(obj2, &ww); in igt_gem_ww_ctx()
232 err = i915_gem_ww_ctx_backoff(&ww); in igt_gem_ww_ctx()
236 i915_gem_ww_ctx_fini(&ww); in igt_gem_ww_ctx()
igt_spinner.c
42 struct i915_gem_ww_ctx *ww, in igt_spinner_pin_obj() argument
53 ret = i915_gem_object_lock(obj, ww); in igt_spinner_pin_obj()
59 if (!ww) in igt_spinner_pin_obj()
65 if (ww) in igt_spinner_pin_obj()
66 ret = i915_vma_pin_ww(*vma, ww, 0, 0, PIN_USER); in igt_spinner_pin_obj()
80 struct i915_gem_ww_ctx *ww) in igt_spinner_pin() argument
89 vaddr = igt_spinner_pin_obj(ce, ww, spin->hws, I915_MAP_WB, &spin->hws_vma); in igt_spinner_pin()
100 vaddr = igt_spinner_pin_obj(ce, ww, spin->obj, mode, &spin->batch_vma); in igt_spinner_pin()
/linux/arch/x86/crypto/
camellia_glue.c
981 u64 kl, kr, ww; in camellia_setup128() local
1044 CAMELLIA_F(kl, CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, ww); in camellia_setup128()
1045 kr ^= ww; in camellia_setup128()
1050 kr ^= ww; in camellia_setup128()
1052 kl ^= ww; in camellia_setup128()
1089 u64 ww; /* temporary variables */ in camellia_setup256() local
1154 kr ^= ww; in camellia_setup256()
1158 kr ^= ww ^ krr; in camellia_setup256()
1160 kl ^= ww; in camellia_setup256()
1166 krr ^= ww; in camellia_setup256()
[all …]
/linux/drivers/gpu/drm/i915/display/
intel_fb_pin.c
85 struct i915_gem_ww_ctx ww; in intel_pin_and_fence_fb_obj() local
132 i915_gem_ww_ctx_init(&ww, true); in intel_pin_and_fence_fb_obj()
134 ret = i915_gem_object_lock(obj, &ww); in intel_pin_and_fence_fb_obj()
138 ret = i915_gem_object_migrate(obj, &ww, INTEL_REGION_LMEM); in intel_pin_and_fence_fb_obj()
146 vma = i915_gem_object_pin_to_display_plane(obj, &ww, alignment, in intel_pin_and_fence_fb_obj()
189 ret = i915_gem_ww_ctx_backoff(&ww); in intel_pin_and_fence_fb_obj()
193 i915_gem_ww_ctx_fini(&ww); in intel_pin_and_fence_fb_obj()
intel_dpt.c
124 struct i915_gem_ww_ctx ww; in intel_dpt_pin() local
130 for_i915_gem_ww(&ww, err, true) { in intel_dpt_pin()
131 err = i915_gem_object_lock(dpt->obj, &ww); in intel_dpt_pin()
135 vma = i915_gem_object_ggtt_pin_ww(dpt->obj, &ww, NULL, 0, 4096, in intel_dpt_pin()
/linux/drivers/scsi/aic94xx/
aic94xx_reg.c
108 #define ASD_READ_SW(ww, type, ord) \ argument
109 static type asd_read_##ww##_##ord(struct asd_ha_struct *asd_ha, \
113 u32 map_offs = (reg - io_handle->ww##_base) + asd_mem_offs_##ww();\
117 #define ASD_WRITE_SW(ww, type, ord) \ argument
118 static void asd_write_##ww##_##ord(struct asd_ha_struct *asd_ha, \
122 u32 map_offs = (reg - io_handle->ww##_base) + asd_mem_offs_##ww();\
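
In aic94xx_reg.c, ww is a macro parameter rather than a variable: ASD_READ_SW()/ASD_WRITE_SW() paste it into the generated accessor name (asd_read_##ww##_##ord), the io_handle field (ww##_base) and the offset helper (asd_mem_offs_##ww()), stamping out one accessor per register window. A stripped-down illustration of the same token-pasting technique, using hypothetical names rather than the driver's:

#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical macro showing the ##-pasting pattern, not the aic94xx code itself. */
#define DEFINE_REG_READ(ww)                                             \
static u32 read_##ww##_reg(void __iomem *ww##_base, unsigned long off) \
{                                                                       \
        return readl(ww##_base + off);                                  \
}

DEFINE_REG_READ(swa)    /* generates read_swa_reg(swa_base, off) */
DEFINE_REG_READ(swb)    /* generates read_swb_reg(swb_base, off) */
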
/linux/tools/memory-model/
linux-kernel.cat
182 let ww-vis = fence | (strong-fence ; xbstar ; w-pre-bounded) |
194 let ww-incoh = pre-race & co & ww-vis^-1
195 empty (wr-incoh | rw-incoh | ww-incoh) as plain-coherence
198 let ww-nonrace = ww-vis & ((Marked * W) | rw-xbstar) & ((W * Marked) | wr-vis)
199 let ww-race = (pre-race & co) \ ww-nonrace
203 flag ~empty (ww-race | wr-race | rw-race) as data-race
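
In the LKMM cat file, the ww-* relations classify pairs of writes to the same location where at least one access is plain (unmarked): ww-vis captures when one write is guaranteed visible before the other, and any coherence-ordered pair not covered by ww-nonrace falls into ww-race and is flagged as a data race. A conceptual C sketch of the situation these relations are meant to flag, assuming the two threads run concurrently with no ordering between them:

int shared;     /* plain accesses: no WRITE_ONCE(), no locking */

static void thread_a(void) { shared = 1; }
static void thread_b(void) { shared = 2; }

/* With no fence or other mechanism making one store visible before the other,
 * the pair ends up in ww-race and the model reports a data race. */
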
/linux/drivers/i2c/busses/
i2c-sis96x.c
245 u16 ww = 0; in sis96x_probe() local
253 pci_read_config_word(dev, PCI_CLASS_DEVICE, &ww); in sis96x_probe()
254 if (PCI_CLASS_SERIAL_SMBUS != ww) { in sis96x_probe()
255 dev_err(&dev->dev, "Unsupported device class 0x%04x!\n", ww); in sis96x_probe()

