Searched refs:dreq (Results 1 – 25 of 28) sorted by relevance

/linux/fs/nfs/
direct.c
    192  cinfo->dreq = dreq; in nfs_init_cinfo_from_dreq()
    201  if (!dreq) in nfs_direct_req_alloc()
    219  pnfs_release_ds_info(&dreq->ds_cinfo, dreq->inode); in nfs_direct_req_free()
    278  dreq->iocb->ki_complete(dreq->iocb, res); in nfs_direct_complete()
    289  struct nfs_direct_req *dreq = hdr->dreq; in nfs_direct_read_completion() local
    468  dreq->bytes_left = dreq->max_count = count; in nfs_file_direct_read()
    591  struct nfs_direct_req *dreq = data->dreq; in nfs_direct_commit_complete() local
    630  struct nfs_direct_req *dreq = cinfo->dreq; in nfs_direct_resched_write() local
    701  struct nfs_direct_req *dreq = hdr->dreq; in nfs_direct_write_completion() local
    758  struct nfs_direct_req *dreq = hdr->dreq; in nfs_direct_write_reschedule_io() local
    [all …]
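
The direct.c hits above outline the NFS O_DIRECT request object: a dreq records the caller's iocb and a byte budget (bytes_left/max_count are set together at direct.c:468), and completion is reported through iocb->ki_complete() (direct.c:278). Below is a minimal user-space sketch of that pattern; iocb_stub, direct_req and their fields are simplified stand-ins, not the kernel's actual nfs_direct_req or kiocb definitions.

#include <stdio.h>
#include <stddef.h>

/* Simplified stand-ins for the kernel's kiocb / nfs_direct_req. */
struct iocb_stub {
	void (*ki_complete)(struct iocb_stub *iocb, long res);
};

struct direct_req {
	struct iocb_stub *iocb;   /* async caller to notify, NULL for sync waiters */
	size_t bytes_left;        /* bytes still in flight */
	size_t max_count;         /* total bytes requested */
	int error;
};

/* Mirrors the "dreq->bytes_left = dreq->max_count = count" hit at direct.c:468. */
static void direct_req_start(struct direct_req *dreq, struct iocb_stub *iocb,
			     size_t count)
{
	dreq->iocb = iocb;
	dreq->bytes_left = dreq->max_count = count;
	dreq->error = 0;
}

/* Mirrors the "dreq->iocb->ki_complete(dreq->iocb, res)" hit at direct.c:278. */
static void direct_req_complete(struct direct_req *dreq)
{
	long res = dreq->error ? dreq->error
			       : (long)(dreq->max_count - dreq->bytes_left);

	if (dreq->iocb)
		dreq->iocb->ki_complete(dreq->iocb, res);
}

static void on_done(struct iocb_stub *iocb, long res)
{
	(void)iocb;
	printf("direct I/O finished: %ld bytes\n", res);
}

int main(void)
{
	struct iocb_stub iocb = { .ki_complete = on_done };
	struct direct_req dreq;

	direct_req_start(&dreq, &iocb, 4096);
	dreq.bytes_left = 0;              /* pretend every read came back */
	direct_req_complete(&dreq);
	return 0;
}
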
cache_lib.c
    71   kfree(dreq); in nfs_cache_defer_req_put()
    76   struct nfs_cache_defer_req *dreq; in nfs_dns_cache_revisit() local
    80   complete(&dreq->completion); in nfs_dns_cache_revisit()
    81   nfs_cache_defer_req_put(dreq); in nfs_dns_cache_revisit()
    86   struct nfs_cache_defer_req *dreq; in nfs_dns_cache_defer() local
    90   refcount_inc(&dreq->count); in nfs_dns_cache_defer()
    92   return &dreq->deferred_req; in nfs_dns_cache_defer()
    99   dreq = kzalloc(sizeof(*dreq), GFP_KERNEL); in nfs_cache_defer_req_alloc()
    100  if (dreq) { in nfs_cache_defer_req_alloc()
    102  refcount_set(&dreq->count, 1); in nfs_cache_defer_req_alloc()
    [all …]
dns_resolve.c
    284  struct nfs_cache_defer_req *dreq) in do_cache_lookup() argument
    290  ret = cache_check(cd, &(*item)->h, &dreq->req); in do_cache_lookup()
    326  struct nfs_cache_defer_req *dreq; in do_cache_lookup_wait() local
    329  dreq = nfs_cache_defer_req_alloc(); in do_cache_lookup_wait()
    330  if (!dreq) in do_cache_lookup_wait()
    332  ret = do_cache_lookup(cd, key, item, dreq); in do_cache_lookup_wait()
    334  ret = nfs_cache_wait_for_upcall(dreq); in do_cache_lookup_wait()
    338  nfs_cache_defer_req_put(dreq); in do_cache_lookup_wait()
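
Read together, the cache_lib.c and dns_resolve.c hits show the deferred-upcall pattern: allocate the dreq with its refcount set to 1 (cache_lib.c:99-102), take an extra reference when the lookup is deferred (cache_lib.c:90), and on revisit complete the waiter and drop that reference (cache_lib.c:80-81), while do_cache_lookup_wait() waits for the upcall and then puts its own reference. Below is a compact single-threaded user-space analogue, with a plain counter and flag standing in for refcount_t and struct completion; all names are illustrative, not the kernel API.

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-in for struct nfs_cache_defer_req. */
struct cache_defer_req {
	int count;        /* the kernel uses refcount_t */
	int completed;    /* the kernel uses struct completion */
};

static struct cache_defer_req *cache_defer_req_alloc(void)
{
	struct cache_defer_req *dreq = calloc(1, sizeof(*dreq));

	if (dreq)
		dreq->count = 1;          /* mirrors refcount_set(&dreq->count, 1) */
	return dreq;
}

static void cache_defer_req_put(struct cache_defer_req *dreq)
{
	if (--dreq->count == 0)           /* kernel: refcount_dec_and_test() */
		free(dreq);               /* mirrors kfree(dreq) at cache_lib.c:71 */
}

/* The defer path takes an extra reference before handing the request to the
 * cache layer (cache_lib.c:90). */
static struct cache_defer_req *cache_defer(struct cache_defer_req *dreq)
{
	dreq->count++;                    /* kernel: refcount_inc(&dreq->count) */
	return dreq;
}

/* The revisit path completes the waiter and drops that reference
 * (cache_lib.c:80-81). */
static void cache_revisit(struct cache_defer_req *dreq)
{
	dreq->completed = 1;              /* kernel: complete(&dreq->completion) */
	cache_defer_req_put(dreq);
}

int main(void)
{
	struct cache_defer_req *dreq = cache_defer_req_alloc();

	if (!dreq)
		return 1;
	cache_defer(dreq);                /* upcall pending, park the request */
	cache_revisit(dreq);              /* answer arrived, wake the waiter */
	printf("completed = %d\n", dreq->completed);
	cache_defer_req_put(dreq);        /* caller's original reference */
	return 0;
}
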
cache_lib.h
    24   extern void nfs_cache_defer_req_put(struct nfs_cache_defer_req *dreq);
    25   extern int nfs_cache_wait_for_upcall(struct nfs_cache_defer_req *dreq);
write.c
    924   cinfo->dreq = NULL; in nfs_init_cinfo_from_inode()
    930   struct nfs_direct_req *dreq) in nfs_init_cinfo() argument
    932   if (dreq) in nfs_init_cinfo()
    933   nfs_init_cinfo_from_dreq(cinfo, dreq); in nfs_init_cinfo()
    1052  if ((ret == max) && !cinfo->dreq) in nfs_scan_commit_list()
    1770  data->dreq = cinfo->dreq; in nfs_init_commit()
    1795  if (!cinfo->dreq) in nfs_retry_commit()
    1897  nfs_init_cinfo(&cinfo, data->inode, data->dreq); in nfs_commit_release_pages()
internal.h
    519  struct nfs_direct_req *dreq);
    584  struct nfs_direct_req *dreq);
    585  extern ssize_t nfs_dreq_bytes_left(struct nfs_direct_req *dreq);
    693  if (!cinfo->dreq) { in nfs_mark_page_unstable()
pagelist.c
    70    hdr->dreq = desc->pg_dreq; in nfs_pgheader_init()
    1358  desc->pg_dreq = hdr->dreq; in nfs_pageio_resend()
/linux/net/dccp/
minisocks.c
    116  newdp->dccps_iss = dreq->dreq_iss; in dccp_create_openreq_child()
    117  newdp->dccps_gss = dreq->dreq_gss; in dccp_create_openreq_child()
    119  newdp->dccps_isr = dreq->dreq_isr; in dccp_create_openreq_child()
    120  newdp->dccps_gsr = dreq->dreq_gsr; in dccp_create_openreq_child()
    154  spin_lock_bh(&dreq->dreq_lock); in dccp_check_req()
    181  dreq->dreq_iss, dreq->dreq_gss)) { in dccp_check_req()
    186  (unsigned long long) dreq->dreq_iss, in dccp_check_req()
    191  if (dccp_parse_options(sk, dreq, skb)) in dccp_check_req()
    208  spin_unlock_bh(&dreq->dreq_lock); in dccp_check_req()
    261  spin_lock_init(&dreq->dreq_lock); in dccp_reqsk_init()
    [all …]
options.c
    99   if (dreq != NULL && (opt >= DCCPO_MIN_RX_CCID_SPECIFIC || in dccp_parse_options()
    125  rc = dccp_feat_parse_options(sk, dreq, mandatory, opt, in dccp_parse_options()
    144  if (dreq != NULL) { in dccp_parse_options()
    145  dreq->dreq_timestamp_echo = ntohl(opt_val); in dccp_parse_options()
    146  dreq->dreq_timestamp_time = dccp_timestamp(); in dccp_parse_options()
    353  struct dccp_request_sock *dreq, in dccp_insert_option_timestamp_echo() argument
    360  if (dreq != NULL) { in dccp_insert_option_timestamp_echo()
    362  tstamp_echo = htonl(dreq->dreq_timestamp_echo); in dccp_insert_option_timestamp_echo()
    363  dreq->dreq_timestamp_echo = 0; in dccp_insert_option_timestamp_echo()
    596  if (dccp_feat_insert_opts(NULL, dreq, skb)) in dccp_insert_options_rsk()
    [all …]
output.c
    403  struct dccp_request_sock *dreq; in dccp_make_response() local
    422  dreq = dccp_rsk(req); in dccp_make_response()
    424  dccp_inc_seqno(&dreq->dreq_gss); in dccp_make_response()
    426  DCCP_SKB_CB(skb)->dccpd_seq = dreq->dreq_gss; in dccp_make_response()
    429  if (dccp_feat_server_ccid_dependencies(dreq)) in dccp_make_response()
    432  if (dccp_insert_options_rsk(dreq, skb)) in dccp_make_response()
    444  dccp_hdr_set_seq(dh, dreq->dreq_gss); in dccp_make_response()
    445  dccp_hdr_set_ack(dccp_hdr_ack_bits(skb), dreq->dreq_gsr); in dccp_make_response()
    446  dccp_hdr_response(skb)->dccph_resp_service = dreq->dreq_service; in dccp_make_response()
ipv4.c
    589  struct dccp_request_sock *dreq; in dccp_v4_conn_request() local
    620  dreq = dccp_rsk(req); in dccp_v4_conn_request()
    621  if (dccp_parse_options(sk, dreq, skb)) in dccp_v4_conn_request()
    641  dreq->dreq_isr = dcb->dccpd_seq; in dccp_v4_conn_request()
    642  dreq->dreq_gsr = dreq->dreq_isr; in dccp_v4_conn_request()
    643  dreq->dreq_iss = dccp_v4_init_sequence(skb); in dccp_v4_conn_request()
    644  dreq->dreq_gss = dreq->dreq_iss; in dccp_v4_conn_request()
    645  dreq->dreq_service = service; in dccp_v4_conn_request()
ipv6.c
    320  struct dccp_request_sock *dreq; in dccp_v6_conn_request() local
    358  dreq = dccp_rsk(req); in dccp_v6_conn_request()
    359  if (dccp_parse_options(sk, dreq, skb)) in dccp_v6_conn_request()
    391  dreq->dreq_isr = dcb->dccpd_seq; in dccp_v6_conn_request()
    392  dreq->dreq_gsr = dreq->dreq_isr; in dccp_v6_conn_request()
    393  dreq->dreq_iss = dccp_v6_init_sequence(skb); in dccp_v6_conn_request()
    394  dreq->dreq_gss = dreq->dreq_iss; in dccp_v6_conn_request()
    395  dreq->dreq_service = service; in dccp_v6_conn_request()
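
The ipv4.c and ipv6.c hits perform the same request-socket setup: the peer's initial sequence number seeds dreq_isr and dreq_gsr, a locally generated initial sequence number seeds dreq_iss and dreq_gss (ipv4.c:641-644, ipv6.c:391-394), and minisocks.c later copies all four into the child socket. Below is a small sketch of that bookkeeping; the struct and helper are simplified stand-ins for dccp_request_sock, not the kernel's definitions.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for struct dccp_request_sock's sequence state. */
struct dccp_req_stub {
	uint64_t dreq_iss;   /* initial sequence number we will send */
	uint64_t dreq_gss;   /* greatest sequence number we have sent */
	uint64_t dreq_isr;   /* initial sequence number received from the peer */
	uint64_t dreq_gsr;   /* greatest sequence number received from the peer */
};

/* Mirrors the assignments at ipv4.c:641-644 / ipv6.c:391-394. */
static void dccp_req_init_seq(struct dccp_req_stub *dreq,
			      uint64_t peer_seq, uint64_t local_iss)
{
	dreq->dreq_isr = peer_seq;
	dreq->dreq_gsr = dreq->dreq_isr;
	dreq->dreq_iss = local_iss;
	dreq->dreq_gss = dreq->dreq_iss;
}

int main(void)
{
	struct dccp_req_stub dreq;

	dccp_req_init_seq(&dreq, 1000, 5000);
	/* The RESPONSE path bumps gss before sending (output.c:424). */
	dreq.dreq_gss++;
	printf("iss=%llu gss=%llu isr=%llu gsr=%llu\n",
	       (unsigned long long)dreq.dreq_iss,
	       (unsigned long long)dreq.dreq_gss,
	       (unsigned long long)dreq.dreq_isr,
	       (unsigned long long)dreq.dreq_gsr);
	return 0;
}
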
feat.c
    633   int dccp_feat_insert_opts(struct dccp_sock *dp, struct dccp_request_sock *dreq, in dccp_feat_insert_opts() argument
    636   struct list_head *fn = dreq ? &dreq->dreq_featneg : &dp->dccps_featneg; in dccp_feat_insert_opts()
    1004  int dccp_feat_server_ccid_dependencies(struct dccp_request_sock *dreq) in dccp_feat_server_ccid_dependencies() argument
    1006  struct list_head *fn = &dreq->dreq_featneg; in dccp_feat_server_ccid_dependencies()
    1404  int dccp_feat_parse_options(struct sock *sk, struct dccp_request_sock *dreq, in dccp_feat_parse_options() argument
    1408  struct list_head *fn = dreq ? &dreq->dreq_featneg : &dp->dccps_featneg; in dccp_feat_parse_options()
dccp.h
    460  int dccp_feat_server_ccid_dependencies(struct dccp_request_sock *dreq);
/linux/drivers/crypto/marvell/cesa/
tdma.c
    37   void mv_cesa_dma_step(struct mv_cesa_req *dreq) in mv_cesa_dma_step() argument
    39   struct mv_cesa_engine *engine = dreq->engine; in mv_cesa_dma_step()
    51   writel_relaxed(dreq->chain.first->cur_dma, in mv_cesa_dma_step()
    62   for (tdma = dreq->chain.first; tdma;) { in mv_cesa_dma_cleanup()
    75   dreq->chain.first = NULL; in mv_cesa_dma_cleanup()
    76   dreq->chain.last = NULL; in mv_cesa_dma_cleanup()
    97   struct mv_cesa_req *dreq) in mv_cesa_tdma_chain() argument
    100  engine->chain.first = dreq->chain.first; in mv_cesa_tdma_chain()
    101  engine->chain.last = dreq->chain.last; in mv_cesa_tdma_chain()
    106  last->next = dreq->chain.first; in mv_cesa_tdma_chain()
    [all …]
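
The tdma.c hits show how a request's DMA descriptor chain (dreq->chain.first/.last) is handed to the engine: adopt it outright when the engine's chain is empty, otherwise splice it in by pointing the engine's last descriptor at the request's first (tdma.c:100-106). Below is a generic linked-chain sketch of that append step; desc and chain are illustrative stand-ins for the mv_cesa structures.

#include <stdio.h>

struct desc {
	struct desc *next;
	int id;
};

struct chain {
	struct desc *first;
	struct desc *last;
};

/* Sketch of the mv_cesa_tdma_chain() idea: adopt the request's chain when
 * the engine's chain is empty, otherwise splice it after the engine's last
 * descriptor (tdma.c:100-106). */
static void chain_append(struct chain *engine, const struct chain *dreq)
{
	if (!engine->first) {
		engine->first = dreq->first;
		engine->last = dreq->last;
	} else {
		engine->last->next = dreq->first;
		engine->last = dreq->last;
	}
}

int main(void)
{
	struct desc a = { .id = 1 }, b = { .id = 2 };
	struct chain engine = { 0 };
	struct chain req1 = { .first = &a, .last = &a };
	struct chain req2 = { .first = &b, .last = &b };

	chain_append(&engine, &req1);
	chain_append(&engine, &req2);

	for (struct desc *d = engine.first; d; d = d->next)
		printf("desc %d\n", d->id);
	return 0;
}
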
cesa.h
    826  void mv_cesa_dma_step(struct mv_cesa_req *dreq);
    828  static inline int mv_cesa_dma_process(struct mv_cesa_req *dreq, in mv_cesa_dma_process() argument
    840  void mv_cesa_dma_prepare(struct mv_cesa_req *dreq,
    842  void mv_cesa_dma_cleanup(struct mv_cesa_req *dreq);
    844  struct mv_cesa_req *dreq);
/linux/net/sunrpc/
cache.c
    585  hlist_del_init(&dreq->hash); in __unhash_deferred_req()
    587  list_del_init(&dreq->recent); in __unhash_deferred_req()
    596  INIT_LIST_HEAD(&dreq->recent); in __hash_deferred_req()
    605  dreq->item = item; in setup_deferral()
    640  setup_deferral(dreq, item, 0); in cache_wait_req()
    701  dreq = req->defer(req); in cache_defer_req()
    702  if (dreq == NULL) in cache_defer_req()
    726  if (dreq->item == item) { in cache_revisit_request()
    736  dreq->revisit(dreq, 0); in cache_revisit_request()
    750  if (dreq->owner == owner) { in cache_clean_deferred()
    [all …]
svc_xprt.c
    1159  static void svc_revisit(struct cache_deferred_req *dreq, int too_many) in svc_revisit() argument
    1162  container_of(dreq, struct svc_deferred_req, handle); in svc_revisit()
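
The svc_xprt.c hit relies on the embed-and-recover idiom: the generic struct cache_deferred_req handle is embedded inside struct svc_deferred_req, and the revisit callback gets back to the outer object with container_of() (svc_xprt.c:1159-1162). Below is a self-contained sketch of that idiom; the stub types and the simplified container_of() definition are illustrative, not copies of the kernel's.

#include <stdio.h>
#include <stddef.h>

/* Simplified version of the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Generic handle handed to the cache layer. */
struct cache_deferred_req {
	void (*revisit)(struct cache_deferred_req *dreq, int too_many);
};

/* Transport-specific wrapper; the handle is embedded, not pointed to. */
struct svc_deferred_req_stub {
	int xid;
	struct cache_deferred_req handle;
};

/* Mirrors svc_revisit(): recover the outer request from the embedded handle. */
static void svc_revisit_stub(struct cache_deferred_req *dreq, int too_many)
{
	struct svc_deferred_req_stub *dr =
		container_of(dreq, struct svc_deferred_req_stub, handle);

	printf("revisiting deferred request xid=%d too_many=%d\n",
	       dr->xid, too_many);
}

int main(void)
{
	struct svc_deferred_req_stub dr = { .xid = 42 };

	dr.handle.revisit = svc_revisit_stub;
	dr.handle.revisit(&dr.handle, 0);   /* what the cache layer would call */
	return 0;
}
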
/linux/drivers/s390/block/
dasd_diag.c
    167  struct dasd_diag_req *dreq; in dasd_start_diag() local
    178  dreq = cqr->data; in dasd_start_diag()
    183  private->iob.block_count = dreq->block_count; in dasd_start_diag()
    185  private->iob.bio_list = dreq->bio; in dasd_start_diag()
    512  struct dasd_diag_req *dreq; in dasd_diag_build_cp() local
    545  cqr = dasd_smalloc_request(DASD_DIAG_MAGIC, 0, struct_size(dreq, bio, count), in dasd_diag_build_cp()
    550  dreq = (struct dasd_diag_req *) cqr->data; in dasd_diag_build_cp()
    551  dreq->block_count = count; in dasd_diag_build_cp()
    552  dbio = dreq->bio; in dasd_diag_build_cp()
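
The dasd_diag.c hits allocate the request with struct_size(dreq, bio, count), i.e. a structure that ends in a flexible array with one bio entry per block, sized when the request is built (dasd_diag.c:545-552). Below is a user-space sketch of that allocation pattern; the struct layout is a simplified stand-in, and struct_size() is open-coded here without the kernel's overflow-checking helpers.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for struct dasd_diag_req. */
struct diag_req {
	unsigned int block_count;
	unsigned int bio[];       /* flexible array, one entry per block */
};

int main(void)
{
	unsigned int count = 8;
	/* Rough equivalent of struct_size(dreq, bio, count). */
	struct diag_req *dreq = malloc(sizeof(*dreq) + count * sizeof(dreq->bio[0]));

	if (!dreq)
		return 1;

	dreq->block_count = count;        /* mirrors dasd_diag_build_cp() */
	for (unsigned int i = 0; i < count; i++)
		dreq->bio[i] = i;         /* one descriptor per block */

	printf("allocated request for %u blocks\n", dreq->block_count);
	free(dreq);
	return 0;
}
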
/linux/drivers/dma/
bcm2835-dma.c
    72   unsigned int dreq; member
    659  if (c->dreq != 0) in bcm2835_dma_prep_slave_sg()
    660  info |= BCM2835_DMA_PER_MAP(c->dreq); in bcm2835_dma_prep_slave_sg()
    733  if (c->dreq != 0) in bcm2835_dma_prep_dma_cyclic()
    734  info |= BCM2835_DMA_PER_MAP(c->dreq); in bcm2835_dma_prep_dma_cyclic()
    873  to_bcm2835_dma_chan(chan)->dreq = spec->args[0]; in bcm2835_dma_xlate()
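
In bcm2835-dma.c, dreq is not a request object but the peripheral DREQ line number: it comes from the device-tree specifier in bcm2835_dma_xlate() (line 873) and, when non-zero, is folded into the transfer-information word with BCM2835_DMA_PER_MAP() (lines 659-660 and 733-734). Below is a sketch of that encoding, assuming the PERMAP field occupies bits 20:16 of the TI word as on the BCM2835 controller; the macro is re-derived for illustration, not copied from the driver.

#include <stdio.h>
#include <stdint.h>

/* Assumed layout: PERMAP in bits 20:16 of the DMA TI (transfer information)
 * word; 0 means "no peripheral pacing". */
#define DMA_PER_MAP(x)   (((uint32_t)(x) & 0x1f) << 16)

static uint32_t build_ti(uint32_t base_info, unsigned int dreq)
{
	/* Mirrors the "if (c->dreq != 0) info |= BCM2835_DMA_PER_MAP(c->dreq)"
	 * hits at bcm2835-dma.c:659-660 and 733-734. */
	if (dreq != 0)
		base_info |= DMA_PER_MAP(dreq);
	return base_info;
}

int main(void)
{
	unsigned int dreq = 5;   /* e.g. a DREQ number handed over by the DT
				  * specifier, as in bcm2835_dma_xlate() */

	printf("TI = 0x%08x\n", build_ti(0, dreq));
	return 0;
}
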
/linux/fs/nilfs2/
btree.c
    1726  union nilfs_bmap_ptr_req *dreq, in nilfs_btree_prepare_convert_and_insert() argument
    1744  ret = nilfs_bmap_prepare_alloc_ptr(btree, dreq, dat); in nilfs_btree_prepare_convert_and_insert()
    1751  nreq->bpr_ptr = dreq->bpr_ptr + 1; in nilfs_btree_prepare_convert_and_insert()
    1771  nilfs_bmap_abort_alloc_ptr(btree, dreq, dat); in nilfs_btree_prepare_convert_and_insert()
    1782  union nilfs_bmap_ptr_req *dreq, in nilfs_btree_commit_convert_and_insert() argument
    1802  nilfs_bmap_commit_alloc_ptr(btree, dreq, dat); in nilfs_btree_commit_convert_and_insert()
    1824  nilfs_bmap_commit_alloc_ptr(btree, dreq, dat); in nilfs_btree_commit_convert_and_insert()
    1838  nilfs_bmap_set_target_v(btree, key, dreq->bpr_ptr); in nilfs_btree_commit_convert_and_insert()
    1855  union nilfs_bmap_ptr_req dreq, nreq, *di, *ni; in nilfs_btree_convert_and_insert() local
    1860  di = &dreq; in nilfs_btree_convert_and_insert()
    [all …]
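
The btree.c hits follow the bmap allocator's two-phase protocol: prepare the allocation described by dreq (and, if the conversion needs it, a second request nreq placed at dreq->bpr_ptr + 1, btree.c:1751), then either commit the prepared allocations or abort them on failure (btree.c:1771, 1802, 1824). Below is a minimal sketch of that prepare/commit/abort shape; the request type and allocator are illustrative, not the nilfs_bmap API.

#include <stdio.h>

struct ptr_req {
	unsigned long ptr;      /* block pointer being allocated */
	int prepared;
};

static int prepare_alloc(struct ptr_req *req, unsigned long ptr)
{
	req->ptr = ptr;
	req->prepared = 1;      /* reservation only; nothing visible yet */
	return 0;
}

static void commit_alloc(struct ptr_req *req)
{
	printf("committed ptr %lu\n", req->ptr);
	req->prepared = 0;
}

static void abort_alloc(struct ptr_req *req)
{
	printf("aborted ptr %lu\n", req->ptr);
	req->prepared = 0;
}

/* Sketch of the prepare/commit/abort flow: reserve dreq first, derive nreq
 * from it (btree.c:1751), unwind dreq if nreq fails (btree.c:1771), and
 * commit both on success (btree.c:1802/1824). */
static int convert_and_insert(struct ptr_req *dreq, struct ptr_req *nreq)
{
	int err = prepare_alloc(dreq, 100);

	if (err)
		return err;
	if (nreq) {
		err = prepare_alloc(nreq, dreq->ptr + 1);
		if (err) {
			abort_alloc(dreq);
			return err;
		}
	}
	commit_alloc(dreq);
	if (nreq)
		commit_alloc(nreq);
	return 0;
}

int main(void)
{
	struct ptr_req dreq = { 0 }, nreq = { 0 };

	return convert_and_insert(&dreq, &nreq);
}
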
/linux/Documentation/devicetree/bindings/dma/
st_fdma.txt
    51   -bit 2-0: Holdoff value, dreq will be masked for
/linux/include/linux/
dccp.h
    180  extern int dccp_parse_options(struct sock *sk, struct dccp_request_sock *dreq,
nfs_xdr.h
    1609  struct nfs_direct_req *dreq; member
    1650  struct nfs_direct_req *dreq; /* O_DIRECT request */ member
    1662  struct nfs_direct_req *dreq; /* O_DIRECT request */ member
/linux/drivers/infiniband/core/
cm_trace.h
    183  DEFINE_CM_SEND_EVENT(dreq);

Completed in 80 milliseconds
