/linux/drivers/scsi/scsi_bsg.c
  in scsi_bsg_sg_io_fn():
    15  struct scsi_request *sreq;    (local)
    35  sreq = scsi_req(rq);
    36  sreq->cmd_len = hdr->request_len;
    37  if (sreq->cmd_len > BLK_MAX_CDB) {
    38  sreq->cmd = kzalloc(sreq->cmd_len, GFP_KERNEL);
    39  if (!sreq->cmd)
    44  if (copy_from_user(sreq->cmd, uptr64(hdr->request), sreq->cmd_len))
    47  if (!scsi_cmd_allowed(sreq->cmd, mode))
    80  sreq->sense_len);
    89  hdr->din_resid = sreq->resid_len;
  [all …]

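The scsi_bsg.c hits above show a common pattern: the CDB length comes from userspace, small commands reuse an inline buffer, and anything longer than BLK_MAX_CDB (16 bytes in current kernels) gets its own allocation before the bytes are copied in and vetted. Below is a rough userspace sketch of that flow, not the kernel code itself; MAX_INLINE_CDB, struct fake_request, copy_cdb() and cdb_allowed() are invented names standing in for BLK_MAX_CDB, the request, copy_from_user() and scsi_cmd_allowed().

#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#define MAX_INLINE_CDB 16	/* stands in for BLK_MAX_CDB */

struct fake_request {
	unsigned char inline_cdb[MAX_INLINE_CDB];
	unsigned char *cdb;	/* points at inline_cdb or a heap buffer */
	size_t cdb_len;
};

/* Placeholder for a permission check such as scsi_cmd_allowed(). */
bool cdb_allowed(const unsigned char *cdb, size_t len)
{
	return len > 0 && cdb[0] != 0xff;
}

int copy_cdb(struct fake_request *rq, const unsigned char *user_cdb, size_t len)
{
	rq->cdb_len = len;
	if (len > MAX_INLINE_CDB) {
		rq->cdb = calloc(1, len);	/* mirrors the kzalloc() branch */
		if (!rq->cdb)
			return -1;
	} else {
		rq->cdb = rq->inline_cdb;
	}

	memcpy(rq->cdb, user_cdb, len);		/* copy_from_user() stand-in */

	if (!cdb_allowed(rq->cdb, len)) {
		if (rq->cdb != rq->inline_cdb)
			free(rq->cdb);
		return -1;
	}
	return 0;
}
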
/linux/include/trace/events/netfs.h
   183  TP_ARGS(sreq, what),
   200  __entry->error = sreq->error;
   201  __entry->flags = sreq->flags;
   204  __entry->len = sreq->len;
   206  __entry->start = sreq->start;
   239  __entry->index = sreq ? sreq->debug_index : 0;
   241  __entry->flags = sreq ? sreq->flags : 0;
   242  __entry->source = sreq ? sreq->source : NETFS_INVALID_READ;
   244  __entry->len = sreq ? sreq->len : 0;
   245  __entry->transferred = sreq ? sreq->transferred : 0;
  [all …]

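The second netfs tracepoint above snapshots a subrequest that may legitimately be NULL, so every field is captured through "sreq ? value : default". A minimal C sketch of that guarded-snapshot pattern follows; struct sub_request, struct trace_entry and snapshot_sreq() are invented for the illustration and only model the field capture, not the TRACE_EVENT machinery.

#include <stddef.h>

enum read_source { READ_FROM_CACHE, READ_FROM_SERVER, INVALID_READ };

struct sub_request {
	unsigned int debug_index;
	unsigned int flags;
	enum read_source source;
	size_t len;
	size_t transferred;
};

struct trace_entry {
	unsigned int index;
	unsigned int flags;
	enum read_source source;
	size_t len;
	size_t transferred;
};

/* Capture the subrequest state, falling back to defaults when it is NULL. */
void snapshot_sreq(struct trace_entry *e, const struct sub_request *sreq)
{
	e->index       = sreq ? sreq->debug_index : 0;
	e->flags       = sreq ? sreq->flags : 0;
	e->source      = sreq ? sreq->source : INVALID_READ;
	e->len         = sreq ? sreq->len : 0;
	e->transferred = sreq ? sreq->transferred : 0;
}
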
/linux/drivers/crypto/marvell/cesa/cipher.c
  in mv_cesa_skcipher_std_step():
    93  memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op));
    95  memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
   101  sreq->size = len;
   107  memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op));
   109  memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
   112  memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op.desc));
   114  memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));
  in mv_cesa_skcipher_std_process():
   133  sreq->offset);
   135  sreq->offset += len;
  in mv_cesa_skcipher_std_prepare():
   181  sreq->size = 0;
  [all …]

/linux/drivers/crypto/marvell/cesa/hash.c
  in mv_cesa_ahash_std_step():
   161  struct mv_cesa_ahash_std_req *sreq = &creq->req.std;    (local)
   178  if (!sreq->offset) {
   194  len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset,
   203  sreq->offset += mv_cesa_sg_copy_to_sram(
   206  len - creq->cache_ptr, sreq->offset);
   212  if (creq->last_req && sreq->offset == req->nbytes &&
  in mv_cesa_ahash_std_process():
   287  struct mv_cesa_ahash_std_req *sreq = &creq->req.std;    (local)
   289  if (sreq->offset < (req->nbytes - creq->cache_ptr))
  in mv_cesa_ahash_std_prepare():
   306  struct mv_cesa_ahash_std_req *sreq = &creq->req.std;    (local)
   308  sreq->offset = 0;

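Both CESA listings revolve around the same idea: the request is fed to a fixed-size on-chip SRAM window one chunk at a time, with sreq->offset recording how much input has already been consumed, and the copy uses memcpy() or memcpy_toio() depending on how the SRAM is mapped. The userspace sketch below models only the chunking and offset bookkeeping; SRAM_WINDOW, struct fake_std_req and fake_sram_step() are illustrative names, not driver API.

#include <stddef.h>
#include <string.h>

#define SRAM_WINDOW 512		/* stand-in for the engine's data area */

struct fake_std_req {
	const unsigned char *src;
	size_t total;
	size_t offset;		/* bytes already pushed to the engine */
};

unsigned char fake_sram[SRAM_WINDOW];

/* Push the next chunk into the "SRAM" window; returns the chunk size, 0 when done. */
size_t fake_sram_step(struct fake_std_req *sreq)
{
	size_t len = sreq->total - sreq->offset;

	if (!len)
		return 0;
	if (len > SRAM_WINDOW)
		len = SRAM_WINDOW;

	memcpy(fake_sram, sreq->src + sreq->offset, len);
	sreq->offset += len;	/* mirrors "sreq->offset += len" above */
	return len;
}
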
/linux/drivers/crypto/inside-secure/safexcel_cipher.c
  in safexcel_handle_req_result():
   627  while (sreq->rdescs--) {
  in safexcel_send_req():
   732  sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
   733  sreq->nr_dst = sreq->nr_src;
  in safexcel_handle_inv_result():
   912  while (sreq->rdescs--) {
  in safexcel_skcipher_handle_result():
   965  if (sreq->needs_inv) {
  in safexcel_aead_handle_result():
   988  if (sreq->needs_inv) {
  in safexcel_skcipher_send():
  1030  if (sreq->needs_inv) {
  1047  sreq->rdescs = *results;
  in safexcel_aead_send():
  1063  if (sreq->needs_inv)
  in safexcel_cipher_exit_inv():
  1087  sreq->needs_inv = true;
  [all …]

/linux/drivers/crypto/inside-secure/safexcel_hash.c
  in safexcel_handle_req_result():
   251  if (sreq->nents) {
   253  sreq->nents = 0;
   257  dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
   263  dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
   270  if (sreq->hmac &&
   273  memcpy(sreq->cache, sreq->state,
   276  memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
   278  sreq->len = sreq->block_sz +
   280  sreq->processed = sreq->block_sz;
   281  sreq->hmac = 0;
  [all …]

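Two control fields dominate the safexcel hits: "rdescs" counts how many result descriptors the completion handler still has to pop from the ring, and "needs_inv" diverts a request into a context-invalidation round before the real operation is sent. The sketch below (userspace, invented types; the ring_* helpers only model control flow) is an approximation of that flow, not the driver's API.

#include <stdbool.h>

struct fake_cipher_req {
	int rdescs;	/* result descriptors outstanding */
	bool needs_inv;	/* context must be invalidated before the next operation */
};

/* Stand-ins for ring operations. */
int ring_send_invalidate(struct fake_cipher_req *r) { r->rdescs = 1; return 0; }
int ring_send_crypto(struct fake_cipher_req *r)     { r->rdescs = 2; return 0; }
void ring_ack_result(void)                          { }

int fake_send(struct fake_cipher_req *req)
{
	/* mirrors the send paths above: invalidation takes priority */
	if (req->needs_inv)
		return ring_send_invalidate(req);
	return ring_send_crypto(req);
}

void fake_handle_result(struct fake_cipher_req *req)
{
	/* mirrors the "while (sreq->rdescs--)" acknowledge loop */
	while (req->rdescs-- > 0)
		ring_ack_result();
	req->rdescs = 0;

	if (req->needs_inv)
		req->needs_inv = false;	/* next pass sends the real operation */
}
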
/linux/drivers/net/wireless/mediatek/mt76/mt76_connac_mcu.c
  in mt76_connac_mcu_hw_scan():
  1467  if (!sreq->ssids[i].ssid_len)
  1481  if (!sreq->n_ssids)
  1510  if (sreq->ie_len > 0) {
  1511  memcpy(req->ies, sreq->ie, sreq->ie_len);
  1521  sreq->mac_addr_mask);
  in mt76_connac_mcu_sched_scan_req():
  1589  sreq->mac_addr_mask);
  1596  req->ssids_num = sreq->n_ssids;
  1598  ssid = &sreq->ssids[i];
  1605  match = &sreq->match_sets[i];
  1635  if (sreq->ie_len > 0) {
  [all …]

/linux/drivers/net/wireless/mediatek/mt76/mt76_connac_mcu.h
  1095  struct cfg80211_sched_scan_request *sreq);

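Here the cfg80211 scan request is flattened into a fixed firmware command: zero-length SSID entries are skipped (line 1467) and the extra IEs are appended only when ie_len > 0 (lines 1510/1635). A userspace sketch of that flattening step follows; every structure, constant and function name in it is invented for the example and only loosely mirrors the cfg80211 and MCU structures.

#include <stddef.h>
#include <string.h>

#define FW_MAX_SSIDS  4
#define FW_MAX_IE_LEN 600

struct scan_ssid { unsigned char ssid[32]; size_t ssid_len; };

struct scan_request {			/* cfg80211_scan_request stand-in */
	const struct scan_ssid *ssids;
	int n_ssids;
	const unsigned char *ie;
	size_t ie_len;
};

struct fw_scan_cmd {			/* firmware command stand-in */
	struct scan_ssid ssids[FW_MAX_SSIDS];
	int ssids_num;
	unsigned char ies[FW_MAX_IE_LEN];
	size_t ies_len;
};

int build_fw_scan(struct fw_scan_cmd *cmd, const struct scan_request *sreq)
{
	int i;

	cmd->ssids_num = 0;
	for (i = 0; i < sreq->n_ssids && cmd->ssids_num < FW_MAX_SSIDS; i++) {
		if (!sreq->ssids[i].ssid_len)	/* skip empty SSID slots */
			continue;
		cmd->ssids[cmd->ssids_num++] = sreq->ssids[i];
	}

	if (sreq->ie_len > 0) {			/* mirrors the ie_len > 0 copy */
		if (sreq->ie_len > FW_MAX_IE_LEN)
			return -1;
		memcpy(cmd->ies, sreq->ie, sreq->ie_len);
	}
	cmd->ies_len = sreq->ie_len;
	return 0;
}

The real driver additionally applies sreq->mac_addr_mask for MAC address randomization (lines 1521/1589), which this sketch leaves out.
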
/linux/drivers/crypto/hisilicon/sec2/sec_crypto.c
  in sec_skcipher_cryptlen_ckeck():
  1962  struct sec_req *sreq)    (argument)
  1964  u32 cryptlen = sreq->c_req.sk_req->cryptlen;
  in sec_skcipher_param_check():
  2009  sreq->c_req.c_len = sk_req->cryptlen;
  2012  sreq->use_pbuf = true;
  2014  sreq->use_pbuf = false;
  in sec_skcipher_soft_crypto():
  2050  skcipher_request_set_crypt(subreq, sreq->src, sreq->dst,
  2051  sreq->cryptlen, sreq->iv);
  in sec_aead_spec_check():
  2229  if (sreq->c_req.encrypt)
  2230  sreq->c_req.c_len = req->cryptlen;
  in sec_aead_param_check():
  2276  sreq->use_pbuf = true;
  [all …]

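The use_pbuf flag toggled in sec_skcipher_param_check() and sec_aead_param_check() above selects between a small preallocated bounce buffer and the regular scatter-gather path. A very small sketch of that length-based decision is below; the threshold value and all names are assumptions for the example, not the driver's actual constants.

#include <stdbool.h>
#include <stddef.h>

#define PBUF_THRESHOLD 512	/* illustrative cutoff, not the driver's value */

struct fake_sec_req {
	size_t c_len;		/* cipher length for this request */
	bool use_pbuf;
};

void fake_param_check(struct fake_sec_req *sreq, size_t cryptlen)
{
	sreq->c_len = cryptlen;
	sreq->use_pbuf = (cryptlen <= PBUF_THRESHOLD);
}
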
/linux/drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
  in output_iv_copyback():
   147  struct skcipher_request *sreq;    (local)
   153  sreq = container_of(areq, struct skcipher_request, base);
   154  stfm = crypto_skcipher_reqtfm(sreq);
   158  rctx = skcipher_request_ctx(sreq);
   161  start = sreq->cryptlen - ivsize;
   164  scatterwalk_map_and_copy(sreq->iv, sreq->dst, start,
   167  if (sreq->src != sreq->dst) {
   168  scatterwalk_map_and_copy(sreq->iv, sreq->src,
   171  memcpy(sreq->iv, req_info->iv_out, ivsize);

/linux/drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c
  in output_iv_copyback():
   130  struct skcipher_request *sreq;    (local)
   135  sreq = container_of(areq, struct skcipher_request, base);
   136  stfm = crypto_skcipher_reqtfm(sreq);
   140  rctx = skcipher_request_ctx(sreq);
   143  start = sreq->cryptlen - ivsize;
   146  scatterwalk_map_and_copy(sreq->iv, sreq->dst, start,
   149  if (sreq->src != sreq->dst) {
   150  scatterwalk_map_and_copy(sreq->iv, sreq->src,
   153  memcpy(sreq->iv, req_info->iv_out, ivsize);

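The otx and otx2 listings above are near-identical copies of output_iv_copyback(): once the operation completes, the last ciphertext block is copied back into the request's IV so chained calls can continue from it. The sketch below uses flat buffers in place of the kernel's scatterlists and scatterwalk_map_and_copy(); IV_SIZE, struct fake_skcipher_req and fake_output_iv_copyback() are illustrative names only.

#include <stddef.h>
#include <string.h>

#define IV_SIZE 16	/* AES block size */

struct fake_skcipher_req {
	unsigned char iv[IV_SIZE];
	const unsigned char *dst;	/* ciphertext output buffer */
	size_t cryptlen;
};

void fake_output_iv_copyback(struct fake_skcipher_req *sreq)
{
	size_t start;

	if (sreq->cryptlen < IV_SIZE)
		return;
	start = sreq->cryptlen - IV_SIZE;	/* offset of the last full block */
	memcpy(sreq->iv, sreq->dst + start, IV_SIZE);
}
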
/linux/drivers/crypto/qat/qat_common/qat_algs.c
  in qat_alg_update_iv_ctr_mode():
   849  struct skcipher_request *sreq = qat_req->skcipher_req;    (local)
   854  memcpy(qat_req->iv, sreq->iv, AES_BLOCK_SIZE);
   860  iv_lo += DIV_ROUND_UP(sreq->cryptlen, AES_BLOCK_SIZE);
  in qat_alg_update_iv_cbc_mode():
   870  struct skcipher_request *sreq = qat_req->skcipher_req;    (local)
   871  int offset = sreq->cryptlen - AES_BLOCK_SIZE;
   875  sgl = sreq->dst;
   877  sgl = sreq->src;
  in qat_skcipher_alg_callback():
   907  struct skcipher_request *sreq = qat_req->skcipher_req;    (local)
   918  memcpy(sreq->iv, qat_req->iv, AES_BLOCK_SIZE);
   920  sreq->base.complete(&sreq->base, res);

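The CTR-mode hit at line 860 advances the IV by the number of AES blocks just processed, i.e. DIV_ROUND_UP(cryptlen, 16). The sketch below treats the 16-byte IV as one big-endian 128-bit counter and adds the block count to it; ctr_iv_add() and fake_update_iv_ctr() are invented helper names, not the driver's functions, which split the IV into two 64-bit halves instead.

#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK 16

/* Add "inc" to a big-endian 128-bit counter stored in iv[16], with carry. */
void ctr_iv_add(uint8_t iv[AES_BLOCK], uint64_t inc)
{
	int i;

	for (i = AES_BLOCK - 1; i >= 0 && inc; i--) {
		uint64_t sum = (uint64_t)iv[i] + (inc & 0xff);

		iv[i] = (uint8_t)sum;
		/* carry out of this byte plus the remaining high bytes of inc */
		inc = (inc >> 8) + (sum >> 8);
	}
}

void fake_update_iv_ctr(uint8_t iv[AES_BLOCK], size_t cryptlen)
{
	uint64_t blocks = (cryptlen + AES_BLOCK - 1) / AES_BLOCK;	/* DIV_ROUND_UP */

	ctr_iv_add(iv, blocks);
}

The CBC-mode variant in the same listing instead takes the last ciphertext block out of the scatterlist as the next IV, the same idea as the output_iv_copyback() sketch earlier.
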
/linux/include/linux/netfilter/nf_conntrack_pptp.h
   288  struct PptpStartSessionRequest sreq;    (member)

/linux/arch/s390/kernel/perf_cpum_sf.c
  in sf_disable():
   107  struct hws_lsctl_request_block sreq;    (local)
   109  memset(&sreq, 0, sizeof(sreq));
   110  return lsctl(&sreq);