
Searched refs:rhp (Results 1 – 25 of 37) sorted by relevance


/linux/include/trace/events/
rcu.h 520 __field(void *, rhp)
527 __entry->rhp = rhp;
528 __entry->func = rhp->func;
580 __field(void *, rhp)
587 __entry->rhp = rhp;
635 TP_ARGS(rcuname, rhp),
639 __field(void *, rhp)
645 __entry->rhp = rhp;
668 __field(void *, rhp)
674 __entry->rhp = rhp;
[all …]
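
The rcu.h hits above record an RCU callback's address and the function it carries into the trace ring buffer. A minimal sketch of that shape follows; the event name and prototype are illustrative, and in a real build the definition would live in a trace header with the usual TRACE_SYSTEM / CREATE_TRACE_POINTS boilerplate, omitted here.

    #include <linux/tracepoint.h>

    /* Hypothetical event modeled on the __field()/__entry lines above. */
    TRACE_EVENT(example_rcu_callback,

        TP_PROTO(const char *rcuname, struct rcu_head *rhp),

        TP_ARGS(rcuname, rhp),

        TP_STRUCT__entry(
            __field(const char *, rcuname)
            __field(void *, rhp)
            __field(void *, func)
        ),

        TP_fast_assign(
            __entry->rcuname = rcuname;
            __entry->rhp = rhp;           /* address identifies the callback */
            __entry->func = rhp->func;    /* function that will be invoked */
        ),

        TP_printk("%s rhp=%p func=%ps",
              __entry->rcuname, __entry->rhp, __entry->func)
    );
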
/linux/drivers/infiniband/hw/cxgb4/
mem.c 432 struct c4iw_dev *rhp; in c4iw_get_dma_mr() local
440 rhp = php->rhp; in c4iw_get_dma_mr()
458 mhp->rhp = rhp; in c4iw_get_dma_mr()
498 struct c4iw_dev *rhp; in c4iw_reg_user_mr() local
511 rhp = php->rhp; in c4iw_reg_user_mr()
527 mhp->rhp = rhp; in c4iw_reg_user_mr()
601 struct c4iw_dev *rhp; in c4iw_alloc_mr() local
610 rhp = php->rhp; in c4iw_alloc_mr()
638 mhp->rhp = rhp; in c4iw_alloc_mr()
702 struct c4iw_dev *rhp; in c4iw_dereg_mr() local
[all …]
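
In the cxgb4 hits above, rhp is not an rcu_head at all but the driver's per-device structure: memory-region setup fetches it from the protection domain (rhp = php->rhp) and stores it in the new object (mhp->rhp = rhp). A minimal sketch of that back-pointer handoff, with hypothetical struct names and bodies trimmed to what the example needs:

    struct c4iw_dev;                        /* opaque device structure */

    struct c4iw_pd_sketch {                 /* stands in for the driver's PD */
        struct c4iw_dev *rhp;               /* owning device */
    };

    struct c4iw_mr_sketch {                 /* stands in for the driver's MR */
        struct c4iw_dev *rhp;               /* back-pointer, copied from the PD */
    };

    /* Propagate the device pointer from the PD to the newly created MR. */
    static void mr_bind_device(struct c4iw_mr_sketch *mhp,
                               struct c4iw_pd_sketch *php)
    {
        struct c4iw_dev *rhp = php->rhp;    /* rhp = php->rhp; */

        mhp->rhp = rhp;                     /* mhp->rhp = rhp; */
    }
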
provider.c 64 struct c4iw_dev *rhp; in c4iw_dealloc_ucontext() local
195 struct c4iw_dev *rhp; in c4iw_deallocate_pd() local
199 rhp = php->rhp; in c4iw_deallocate_pd()
202 mutex_lock(&rhp->rdev.stats.lock); in c4iw_deallocate_pd()
203 rhp->rdev.stats.pd.cur--; in c4iw_deallocate_pd()
213 struct c4iw_dev *rhp; in c4iw_allocate_pd() local
216 rhp = (struct c4iw_dev *) ibdev; in c4iw_allocate_pd()
222 php->rhp = rhp; in c4iw_allocate_pd()
232 rhp->rdev.stats.pd.cur++; in c4iw_allocate_pd()
233 if (rhp->rdev.stats.pd.cur > rhp->rdev.stats.pd.max) in c4iw_allocate_pd()
[all …]
qp.c 1085 struct c4iw_dev *rhp; in c4iw_post_send() local
1093 rhp = qhp->rhp; in c4iw_post_send()
2072 struct c4iw_dev *rhp; in c4iw_destroy_qp() local
2078 rhp = qhp->rhp; in c4iw_destroy_qp()
2113 struct c4iw_dev *rhp; in c4iw_create_qp() local
2130 rhp = php->rhp; in c4iw_create_qp()
2186 qhp->rhp = rhp; in c4iw_create_qp()
2378 rhp = qhp->rhp; in c4iw_ib_modify_qp()
2681 rhp = php->rhp; in c4iw_create_srq()
2716 srq->rhp = rhp; in c4iw_create_srq()
[all …]
cq.c 351 qhp = get_qhp(chp->rhp, CQE_QPID(hw_cqe)); in c4iw_flush_hw_cq()
844 c4iw_invalidate_mr(qhp->rhp, in __c4iw_poll_cq_one()
933 qhp = get_qhp(chp->rhp, CQE_QPID(rd_cqe)); in c4iw_poll_cq_one()
984 xa_erase_irq(&chp->rhp->cqs, chp->cq.cqid); in c4iw_destroy_cq()
990 destroy_cq(&chp->rhp->rdev, &chp->cq, in c4iw_destroy_cq()
1020 if (vector >= rhp->rdev.lldi.nciq) in c4iw_create_cq()
1078 ret = create_cq(&rhp->rdev, &chp->cq, in c4iw_create_cq()
1084 chp->rhp = rhp; in c4iw_create_cq()
1105 uresp.qid_mask = rhp->rdev.cqmask; in c4iw_create_cq()
1147 xa_erase_irq(&rhp->cqs, chp->cq.cqid); in c4iw_create_cq()
[all …]
iw_cxgb4.h 344 static inline struct c4iw_cq *get_chp(struct c4iw_dev *rhp, u32 cqid) in get_chp() argument
346 return xa_load(&rhp->cqs, cqid); in get_chp()
351 return xa_load(&rhp->qps, qpid); in get_qhp()
364 struct c4iw_dev *rhp; member
393 struct c4iw_dev *rhp; member
411 struct c4iw_dev *rhp; member
425 struct c4iw_dev *rhp; member
482 struct c4iw_dev *rhp; member
505 struct c4iw_dev *rhp; member
598 int c4iw_modify_qp(struct c4iw_dev *rhp,
[all …]
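
iw_cxgb4.h shows where those rhp back-pointers lead: the device keeps XArrays of CQs and QPs, and the lookup helpers resolve a hardware ID through them. A sketch under the assumption that the lookup tables are named as in the hits (cqs, qps); the device struct here is a trimmed stand-in:

    #include <linux/xarray.h>
    #include <linux/types.h>

    struct c4iw_cq;                         /* opaque here */
    struct c4iw_qp;

    struct c4iw_dev_sketch {                /* trimmed to the two lookup tables */
        struct xarray cqs;                  /* cqid -> struct c4iw_cq * */
        struct xarray qps;                  /* qpid -> struct c4iw_qp * */
    };

    static inline struct c4iw_cq *get_chp(struct c4iw_dev_sketch *rhp, u32 cqid)
    {
        return xa_load(&rhp->cqs, cqid);
    }

    static inline struct c4iw_qp *get_qhp(struct c4iw_dev_sketch *rhp, u32 qpid)
    {
        return xa_load(&rhp->qps, qpid);
    }
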
device.c 1376 xa_lock_irq(&qp->rhp->qps); in recover_lost_dbs()
1378 ret = cxgb4_sync_txq_pidx(qp->rhp->rdev.lldi.ports[0], in recover_lost_dbs()
1386 xa_unlock_irq(&qp->rhp->qps); in recover_lost_dbs()
1391 ret = cxgb4_sync_txq_pidx(qp->rhp->rdev.lldi.ports[0], in recover_lost_dbs()
1400 xa_unlock_irq(&qp->rhp->qps); in recover_lost_dbs()
1405 xa_unlock_irq(&qp->rhp->qps); in recover_lost_dbs()
1408 while (cxgb4_dbfifo_count(qp->rhp->rdev.lldi.ports[0], 1) > 0) { in recover_lost_dbs()
/linux/kernel/rcu/
rcu_segcblist.c 30 *rclp->tail = rhp; in rcu_cblist_enqueue()
31 rclp->tail = &rhp->next; in rcu_cblist_enqueue()
53 if (!rhp) { in rcu_cblist_flush_enqueue()
56 rhp->next = NULL; in rcu_cblist_flush_enqueue()
57 srclp->head = rhp; in rcu_cblist_flush_enqueue()
69 struct rcu_head *rhp; in rcu_cblist_dequeue() local
71 rhp = rclp->head; in rcu_cblist_dequeue()
72 if (!rhp) in rcu_cblist_dequeue()
78 return rhp; in rcu_cblist_dequeue()
347 rhp->next = NULL; in rcu_segcblist_enqueue()
[all …]
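
The rcu_segcblist.c hits show the tail-pointer queue that the RCU callback lists are built on: tail always points at the ->next field of the last element (or at head when the list is empty), so enqueue needs no special case. A simplified, self-contained rendering of that enqueue/dequeue pair with stand-in struct names; the kernel version also maintains a length count, omitted here:

    struct cb_head {                        /* stand-in for struct rcu_head */
        struct cb_head *next;
    };

    struct cb_list {                        /* stand-in for struct rcu_cblist */
        struct cb_head *head;
        struct cb_head **tail;              /* &last->next, or &head when empty */
    };

    static void cb_list_init(struct cb_list *rclp)
    {
        rclp->head = NULL;
        rclp->tail = &rclp->head;
    }

    static void cb_list_enqueue(struct cb_list *rclp, struct cb_head *rhp)
    {
        rhp->next = NULL;
        *rclp->tail = rhp;                  /* *rclp->tail = rhp; */
        rclp->tail = &rhp->next;            /* rclp->tail = &rhp->next; */
    }

    static struct cb_head *cb_list_dequeue(struct cb_list *rclp)
    {
        struct cb_head *rhp = rclp->head;

        if (!rhp)
            return NULL;
        rclp->head = rhp->next;
        if (!rclp->head)                    /* list drained: reset tail to &head */
            rclp->tail = &rclp->head;
        return rhp;
    }
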
srcutiny.c 116 struct rcu_head *rhp; in srcu_drive_gp() local
139 rhp = lh; in srcu_drive_gp()
142 rhp->func(rhp); in srcu_drive_gp()
178 void call_srcu(struct srcu_struct *ssp, struct rcu_head *rhp, in call_srcu() argument
183 rhp->func = func; in call_srcu()
184 rhp->next = NULL; in call_srcu()
186 *ssp->srcu_cb_tail = rhp; in call_srcu()
187 ssp->srcu_cb_tail = &rhp->next; in call_srcu()
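
srcutiny.c shows both halves of the callback lifecycle in the smallest possible form: call_srcu() fills in rhp->func, appends rhp to a tail-pointer list, and the grace-period worker later walks that list invoking rhp->func(rhp). A sketch of the invocation half, assuming a kernel build context and leaving out the locking, workqueue scheduling, and grace-period wait that srcu_drive_gp() performs:

    #include <linux/rcupdate.h>             /* struct rcu_head */

    /* Invoke every callback on a detached, NULL-terminated list. */
    static void drive_detached_callbacks(struct rcu_head *list)
    {
        struct rcu_head *rhp, *next;

        for (rhp = list; rhp; rhp = next) {
            next = rhp->next;               /* read before the callback frees rhp */
            rhp->func(rhp);                 /* rhp->func(rhp); as in srcu_drive_gp() */
        }
    }
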
update.c 469 void do_trace_rcu_torture_read(const char *rcutorturename, struct rcu_head *rhp, in do_trace_rcu_torture_read() argument
473 trace_rcu_torture_read(rcutorturename, rhp, secs, c_old, c); in do_trace_rcu_torture_read()
477 #define do_trace_rcu_torture_read(rcutorturename, rhp, secs, c_old, c) \ argument
537 struct early_boot_kfree_rcu *rhp; in early_boot_test_call_rcu() local
544 rhp = kmalloc(sizeof(*rhp), GFP_KERNEL); in early_boot_test_call_rcu()
545 if (!WARN_ON_ONCE(!rhp)) in early_boot_test_call_rcu()
546 kfree_rcu(rhp, rh); in early_boot_test_call_rcu()
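
early_boot_test_call_rcu() above demonstrates the common kfree_rcu() pattern: embed a struct rcu_head in the object (the member is called rh in the hit) and hand both to kfree_rcu(), which frees the allocation after a grace period. A sketch with an illustrative struct name:

    #include <linux/slab.h>
    #include <linux/rcupdate.h>

    struct example_obj {                    /* hypothetical stand-in */
        struct rcu_head rh;                 /* used only for deferred freeing */
        int payload;
    };

    static void example_free_after_gp(void)
    {
        struct example_obj *rhp = kmalloc(sizeof(*rhp), GFP_KERNEL);

        if (!WARN_ON_ONCE(!rhp))
            kfree_rcu(rhp, rh);             /* freed once a grace period elapses */
    }
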
rcutorture.c 1350 kfree(rhp); in rcu_torture_timer_cb()
1660 struct rcu_head *rhp = kmalloc(sizeof(*rhp), GFP_NOWAIT); in rcu_torture_timer() local
1662 if (rhp) in rcu_torture_timer()
1915 mem_dump_obj(rhp); in rcu_torture_mem_dump_obj()
1922 rhp = kmalloc(sizeof(*rhp), GFP_KERNEL); in rcu_torture_mem_dump_obj()
1925 mem_dump_obj(rhp); in rcu_torture_mem_dump_obj()
1928 kfree(rhp); in rcu_torture_mem_dump_obj()
1932 mem_dump_obj(rhp); in rcu_torture_mem_dump_obj()
1935 vfree(rhp); in rcu_torture_mem_dump_obj()
2929 struct rcu_head *rhp = kmalloc(sizeof(*rhp), GFP_KERNEL); in rcu_test_debug_objects() local
[all …]
srcutree.c 809 struct rcu_head *rhp, bool do_norm) in srcu_gp_start_if_needed() argument
822 if (rhp) in srcu_gp_start_if_needed()
876 if (debug_rcu_head_queue(rhp)) { in __call_srcu()
878 WRITE_ONCE(rhp->func, srcu_leak_callback); in __call_srcu()
882 rhp->func = func; in __call_srcu()
906 __call_srcu(ssp, rhp, func, true); in call_srcu()
1246 struct rcu_head *rhp; in srcu_invoke_callbacks() local
1268 rhp = rcu_cblist_dequeue(&ready_cbs); in srcu_invoke_callbacks()
1269 for (; rhp != NULL; rhp = rcu_cblist_dequeue(&ready_cbs)) { in srcu_invoke_callbacks()
1270 debug_rcu_head_unqueue(rhp); in srcu_invoke_callbacks()
[all …]
tasks.h 158 rhp->next = NULL; in call_rcu_tasks_generic()
159 rhp->func = func; in call_rcu_tasks_generic()
162 WRITE_ONCE(*rtp->cbs_tail, rhp); in call_rcu_tasks_generic()
163 rtp->cbs_tail = &rhp->next; in call_rcu_tasks_generic()
530 void call_rcu_tasks(struct rcu_head *rhp, rcu_callback_t func);
551 void call_rcu_tasks(struct rcu_head *rhp, rcu_callback_t func) in call_rcu_tasks() argument
553 call_rcu_tasks_generic(rhp, func, &rcu_tasks); in call_rcu_tasks()
691 call_rcu_tasks_generic(rhp, func, &rcu_tasks_rude); in call_rcu_tasks_rude()
1223 call_rcu_tasks_generic(rhp, func, &rcu_tasks_trace); in call_rcu_tasks_trace()
1338 static void test_rcu_tasks_callback(struct rcu_head *rhp) in test_rcu_tasks_callback() argument
[all …]
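
tasks.h shows call_rcu_tasks() and its relatives funneling into call_rcu_tasks_generic(), which links rhp onto the per-flavor callback list. From a caller's point of view the API mirrors call_rcu(); a sketch with illustrative struct and function names:

    #include <linux/rcupdate.h>
    #include <linux/slab.h>

    struct tasks_obj {
        struct rcu_head rh;
    };

    static void tasks_obj_free_cb(struct rcu_head *rhp)
    {
        /* The callback receives the same rcu_head that was queued. */
        kfree(container_of(rhp, struct tasks_obj, rh));
    }

    static void tasks_obj_release(struct tasks_obj *p)
    {
        /* Invoked once a Tasks-RCU grace period has elapsed. */
        call_rcu_tasks(&p->rh, tasks_obj_free_cb);
    }
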
rcu_segcblist.h 22 void rcu_cblist_enqueue(struct rcu_cblist *rclp, struct rcu_head *rhp);
25 struct rcu_head *rhp);
135 struct rcu_head *rhp);
137 struct rcu_head *rhp);
rcuscale.c 378 static void rcu_scale_async_cb(struct rcu_head *rhp) in rcu_scale_async_cb() argument
381 kfree(rhp); in rcu_scale_async_cb()
393 struct rcu_head *rhp = NULL; in rcu_scale_writer() local
433 if (!rhp) in rcu_scale_writer()
434 rhp = kmalloc(sizeof(*rhp), GFP_KERNEL); in rcu_scale_writer()
435 if (rhp && atomic_read(this_cpu_ptr(&n_async_inflight)) < gp_async_max) { in rcu_scale_writer()
437 cur_ops->async(rhp, rcu_scale_async_cb); in rcu_scale_writer()
438 rhp = NULL; in rcu_scale_writer()
443 kfree(rhp); /* Because we are stopping. */ in rcu_scale_writer()
sync.c 43 static void rcu_sync_func(struct rcu_head *rhp);
73 static void rcu_sync_func(struct rcu_head *rhp) in rcu_sync_func() argument
75 struct rcu_sync *rsp = container_of(rhp, struct rcu_sync, cb_head); in rcu_sync_func()
tree_nocb.h 303 static bool rcu_nocb_do_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp, in rcu_nocb_do_flush_bypass() argument
311 if (rhp && !rcu_cblist_n_cbs(&rdp->nocb_bypass)) { in rcu_nocb_do_flush_bypass()
316 if (rhp) in rcu_nocb_do_flush_bypass()
318 rcu_cblist_flush_enqueue(&rcl, &rdp->nocb_bypass, rhp); in rcu_nocb_do_flush_bypass()
333 static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp, in rcu_nocb_flush_bypass() argument
340 return rcu_nocb_do_flush_bypass(rdp, rhp, j); in rcu_nocb_flush_bypass()
374 static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp, in rcu_nocb_try_bypass() argument
441 if (!rcu_nocb_flush_bypass(rdp, rhp, j)) { in rcu_nocb_try_bypass()
464 rcu_cblist_enqueue(&rdp->nocb_bypass, rhp); in rcu_nocb_try_bypass()
1452 static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp, in rcu_nocb_flush_bypass() argument
[all …]
rcu.h 466 struct rcu_head *rhp,
480 struct rcu_head *rhp,
485 #define do_trace_rcu_torture_read(rcutorturename, rhp, secs, c_old, c) \ argument
tree.h 431 static bool rcu_nocb_flush_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
433 static bool rcu_nocb_try_bypass(struct rcu_data *rdp, struct rcu_head *rhp,
tree.c 2448 struct rcu_head *rhp; in rcu_do_batch() local
2493 rhp = rcu_cblist_dequeue(&rcl); in rcu_do_batch()
2495 for (; rhp; rhp = rcu_cblist_dequeue(&rcl)) { in rcu_do_batch()
2499 debug_rcu_head_unqueue(rhp); in rcu_do_batch()
2502 trace_rcu_invoke_callback(rcu_state.name, rhp); in rcu_do_batch()
2504 f = rhp->func; in rcu_do_batch()
2505 WRITE_ONCE(rhp->func, (rcu_callback_t)0L); in rcu_do_batch()
2506 f(rhp); in rcu_do_batch()
2911 static void rcu_leak_callback(struct rcu_head *rhp) in rcu_leak_callback() argument
3933 static void rcu_barrier_callback(struct rcu_head *rhp) in rcu_barrier_callback() argument
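
The rcu_do_batch() hits show what finally happens to every queued rhp: it is dequeued from a ready list, its ->func is read and then poisoned so double invocation is detectable, and the function is called with the rcu_head itself as the argument. A condensed sketch assuming the kernel-internal helpers from kernel/rcu/ (rcu_cblist_dequeue(), debug_rcu_head_unqueue()):

    /* Invoke every callback that has been moved onto the ready list rcl. */
    static void invoke_ready_cbs(struct rcu_cblist *rcl)
    {
        struct rcu_head *rhp;
        rcu_callback_t f;

        for (rhp = rcu_cblist_dequeue(rcl); rhp; rhp = rcu_cblist_dequeue(rcl)) {
            debug_rcu_head_unqueue(rhp);                /* debug-objects bookkeeping */
            f = rhp->func;
            WRITE_ONCE(rhp->func, (rcu_callback_t)0L);  /* catch double invocation */
            f(rhp);                                     /* may free rhp */
        }
    }
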
/linux/drivers/media/usb/pvrusb2/
pvrusb2-v4l2.c 42 struct pvr2_ioread *rhp; member
915 if (fhp->rhp) { in pvr2_v4l2_release()
918 sp = pvr2_ioread_get_stream(fhp->rhp); in pvr2_v4l2_release()
920 pvr2_ioread_destroy(fhp->rhp); in pvr2_v4l2_release()
921 fhp->rhp = NULL; in pvr2_v4l2_release()
1046 if (fh->rhp) return 0; in pvr2_v4l2_iosetup()
1063 if (!fh->rhp) { in pvr2_v4l2_iosetup()
1116 if (!fh->rhp) { in pvr2_v4l2_read()
1131 pvr2_ioread_avail(fh->rhp) >= 0); in pvr2_v4l2_read()
1150 if (!fh->rhp) { in pvr2_v4l2_poll()
[all …]
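
The pvrusb2 hits are the one non-RCU use of the name in this result page: rhp is a pvr2_ioread stream-reader handle hanging off per-file-handle state, guarded by NULL checks and torn down exactly once on release. A trimmed sketch of that guard pattern, with the holding struct simplified to the single field used here:

    struct pvr2_ioread;                     /* opaque stream reader */
    void pvr2_ioread_destroy(struct pvr2_ioread *cp);

    struct fh_sketch {                      /* stands in for the per-fh state */
        struct pvr2_ioread *rhp;            /* NULL until I/O is set up */
    };

    static void fh_release_reader(struct fh_sketch *fhp)
    {
        if (fhp->rhp) {
            pvr2_ioread_destroy(fhp->rhp);  /* pvr2_ioread_destroy(fhp->rhp); */
            fhp->rhp = NULL;                /* fhp->rhp = NULL; prevents reuse */
        }
    }
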
/linux/include/linux/
rcupdate_trace.h 87 void call_rcu_tasks_trace(struct rcu_head *rhp, rcu_callback_t func);
95 static inline void call_rcu_tasks_trace(struct rcu_head *rhp, rcu_callback_t func) { BUG(); } in call_rcu_tasks_trace() argument
rcupdate.h 976 static inline void rcu_head_init(struct rcu_head *rhp) in rcu_head_init() argument
978 rhp->func = (rcu_callback_t)~0L; in rcu_head_init()
995 rcu_head_after_call_rcu(struct rcu_head *rhp, rcu_callback_t f) in rcu_head_after_call_rcu() argument
997 rcu_callback_t func = READ_ONCE(rhp->func); in rcu_head_after_call_rcu()
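
rcu_head_init() and rcu_head_after_call_rcu() above form a small debugging aid: the first marks an rcu_head as not yet queued, and the second reports whether call_rcu() has since been invoked on it with the given callback. A usage sketch; everything other than those two helpers is illustrative:

    #include <linux/rcupdate.h>

    struct tracked_obj {
        struct rcu_head rh;
    };

    static void tracked_obj_cb(struct rcu_head *rhp)
    {
        /* ... reclaim the enclosing object ... */
    }

    static void tracked_obj_setup(struct tracked_obj *p)
    {
        rcu_head_init(&p->rh);              /* mark as "no call_rcu() yet" */
    }

    static bool tracked_obj_queued(struct tracked_obj *p)
    {
        /* True once call_rcu(&p->rh, tracked_obj_cb) has been invoked. */
        return rcu_head_after_call_rcu(&p->rh, tracked_obj_cb);
    }
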
/linux/include/linux/sched/
mm.h 57 static inline void __mmdrop_delayed(struct rcu_head *rhp) in __mmdrop_delayed() argument
59 struct mm_struct *mm = container_of(rhp, struct mm_struct, delayed_drop); in __mmdrop_delayed()
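
__mmdrop_delayed() above is the canonical container_of() idiom for RCU callbacks: the callback only receives the embedded rcu_head, so it recovers the enclosing structure from the member's offset. The same shape with illustrative names:

    #include <linux/slab.h>
    #include <linux/rcupdate.h>

    struct widget {
        int id;
        struct rcu_head delayed_drop;       /* embedded, like mm_struct's */
    };

    static void widget_drop(struct rcu_head *rhp)
    {
        struct widget *w = container_of(rhp, struct widget, delayed_drop);

        kfree(w);                           /* safe: a grace period has passed */
    }
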
/linux/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/
misc.h 33 #define trace_rcu_torture_read(rcutorturename, rhp, secs, c_old, c) \ argument

