/linux/drivers/crypto/keembay/

keembay-ocs-hcu-core.c
    117  return rctx->sg_data_total + rctx->buf_cnt;  in kmb_get_total_data()
    125  if (rctx->sg_data_total > (sizeof(rctx->buffer) - rctx->buf_cnt)) {  in flush_sg_to_ocs_buffer()
    139  if (rctx->sg_data_offset == rctx->sg->length) {  in flush_sg_to_ocs_buffer()
    140  rctx->sg = sg_next(rctx->sg);  in flush_sg_to_ocs_buffer()
    153  rctx->sg, rctx->sg_data_offset,  in flush_sg_to_ocs_buffer()
    283  rctx->buf_dma_count = rctx->buf_cnt;  in kmb_ocs_dma_prepare()
    406  rctx->buf_cnt = rctx->blk_sz;  in prepare_ipad()
    572  memset(rctx, 0, sizeof(*rctx));  in kmb_ocs_hcu_init()
    644  if (rctx->sg_data_total <= (sizeof(rctx->buffer) - rctx->buf_cnt))  in kmb_ocs_hcu_update()
    732  memcpy(out, rctx, sizeof(*rctx));  in kmb_ocs_hcu_export()
    [all …]
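
The flush_sg_to_ocs_buffer() / kmb_ocs_hcu_update() hits show the usual staging pattern: when the data still queued in the scatterlist fits in the request context's block buffer, it is copied there rather than DMA-mapped. A minimal userspace sketch of that bookkeeping (names and the 64-byte block size are illustrative, not the driver's):

```c
#include <stddef.h>
#include <string.h>

#define BLK_SZ 64

/* Hypothetical mirror of the fields seen above. */
struct hcu_rctx {
    unsigned char buffer[BLK_SZ];
    size_t buf_cnt;        /* bytes already staged */
    size_t sg_data_total;  /* bytes still queued in the scatterlist */
};

/* Stage up to the buffer's remaining room; the driver only takes this
 * path when sg_data_total <= sizeof(buffer) - buf_cnt, otherwise it
 * feeds the engine straight from the scatterlist. */
static size_t flush_to_buffer(struct hcu_rctx *r,
                              const unsigned char *src, size_t len)
{
    size_t room = sizeof(r->buffer) - r->buf_cnt;
    size_t n = len < room ? len : room;

    memcpy(r->buffer + r->buf_cnt, src, n);
    r->buf_cnt += n;
    r->sg_data_total -= n;
    return n;
}
```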
|
keembay-ocs-aes-core.c
    246  memset(rctx, 0, sizeof(*rctx));  in ocs_aes_init_rctx()
    421  if (rctx->cts_swap && rctx->instruction == OCS_DECRYPT)  in kmb_ocs_sk_prepare_inplace()
    436  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_sk_prepare_inplace()
    445  rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;  in kmb_ocs_sk_prepare_inplace()
    471  rctx->src_dma_count, &rctx->src_dll,  in kmb_ocs_sk_prepare_notinplace()
    486  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_sk_prepare_notinplace()
    538  rctx->cts_swap = (rctx->mode == OCS_MODE_CTS &&  in kmb_ocs_sk_run()
    553  rctx->dst_dll.dma_addr, rctx->src_dll.dma_addr,  in kmb_ocs_sk_run()
    812  rctx->dst_dma_count, &rctx->dst_dll,  in kmb_ocs_aead_dma_prepare()
    826  rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;  in kmb_ocs_aead_dma_prepare()
    [all …]
|
/linux/drivers/crypto/qce/

aead.c
    174  rctx->dst_sg = rctx->dst_tbl.sgl;  in qce_aead_prepare_dst_buf()
    249  sg_init_one(&rctx->adata_sg, rctx->adata, rctx->assoclen);  in qce_aead_ccm_prepare_buf_assoclen()
    277  rctx->src_sg = rctx->src_tbl.sgl;  in qce_aead_ccm_prepare_buf_assoclen()
    291  rctx->dst_nents = rctx->src_nents;  in qce_aead_ccm_prepare_buf_assoclen()
    292  rctx->dst_sg = rctx->src_sg;  in qce_aead_ccm_prepare_buf_assoclen()
    324  rctx->src_sg = rctx->dst_sg;  in qce_aead_prepare_buf()
    354  rctx->dst_nents = rctx->src_nents;  in qce_aead_ccm_prepare_buf()
    355  rctx->dst_sg = rctx->src_sg;  in qce_aead_ccm_prepare_buf()
    367  if (!rctx || !rctx->iv)  in qce_aead_create_ccm_nonce()
    396  memcpy(&rctx->ccm_nonce[0], rctx->iv, rctx->ivsize);  in qce_aead_create_ccm_nonce()
    [all …]
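
qce_aead_create_ccm_nonce() copies the request IV into a CCM nonce block; the flag-byte layout around it comes from RFC 3610. A self-contained sketch of that B0 formatting (names are illustrative, and the driver's exact buffer handling may differ):

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Assemble a CCM B0 block per RFC 3610: the flags byte encodes the
 * Adata bit, M' = (taglen - 2) / 2 and L' = L - 1, where L is the
 * size of the length field; 15 - L bytes remain for the nonce. */
static void ccm_format_b0(uint8_t b0[16], const uint8_t *nonce,
                          unsigned int l /* 2..8 */,
                          unsigned int taglen, size_t msglen, int has_adata)
{
    unsigned int i;

    b0[0] = (has_adata ? 0x40 : 0) |
            (((taglen - 2) / 2) << 3) |
            (l - 1);
    memcpy(&b0[1], nonce, 15 - l);
    for (i = 0; i < l; i++)            /* big-endian message length */
        b0[15 - i] = (uint8_t)(msglen >> (8 * i));
}
```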
|
sha.c
    137  memset(rctx, 0, sizeof(*rctx));  in qce_ahash_init()
    141  memcpy(rctx->digest, std_iv, sizeof(rctx->digest));  in qce_ahash_init()
    151  memcpy(export_state->pending_buf, rctx->buf, rctx->buflen);  in qce_ahash_export()
    168  memset(rctx, 0, sizeof(*rctx));  in qce_ahash_import()
    175  memcpy(rctx->buf, import_state->pending_buf, rctx->buflen);  in qce_ahash_import()
    200  scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,  in qce_ahash_update()
    214  if (rctx->buflen)  in qce_ahash_update()
    215  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  in qce_ahash_update()
    263  sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);  in qce_ahash_update()
    292  memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);  in qce_ahash_final()
    [all …]
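
qce_ahash_update() copies the new tail into rctx->buf (line 200) and replays previously staged bytes through rctx->tmpbuf (lines 214-215, 263). The split between "hash now" and "stage for later" appears to follow the usual whole-blocks-only policy; a sketch of that decision, assuming a 64-byte block:

```c
#include <stddef.h>

#define SHA_BLK 64  /* SHA-1/SHA-256 block size */

/* Decide how much of (staged tail + new data) goes to the engine in
 * this update() pass: whole blocks only mid-stream, with the
 * remainder kept in the context buffer for the next update()/final(). */
static size_t to_hash_now(size_t buflen, size_t nbytes)
{
    size_t total = buflen + nbytes;

    if (total <= SHA_BLK)   /* not a full block yet: stage it all */
        return 0;
    return total - (total % SHA_BLK);
}
```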
|
skcipher.c
    50   dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);  in qce_skcipher_done()
    51   dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_skcipher_done()
    59   memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);  in qce_skcipher_done()
    77   rctx->iv = req->iv;  in qce_skcipher_async_req_handle()
    89   rctx->dst_nents = rctx->src_nents;  in qce_skcipher_async_req_handle()
    104  ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);  in qce_skcipher_async_req_handle()
    116  sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg,  in qce_skcipher_async_req_handle()
    124  rctx->dst_sg = rctx->dst_tbl.sgl;  in qce_skcipher_async_req_handle()
    140  rctx->src_sg = rctx->dst_sg;  in qce_skcipher_async_req_handle()
    164  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);  in qce_skcipher_async_req_handle()
    [all …]
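
qce_sgtable_add() (line 116) appends a driver-owned "result" segment to the destination table so the engine can deposit the updated counter/IV alongside the ciphertext; qce_skcipher_done() reads it back at line 59. A userspace model of that list assembly, with struct seg standing in for struct scatterlist:

```c
#include <stddef.h>

struct seg { void *buf; size_t len; };

/* Build the destination list: the caller's data segments plus one
 * trailing result segment for the engine's writeback block.
 * Returns the entry count (dst_nents). */
static size_t build_dst_list(struct seg *dst, const struct seg *data,
                             size_t n, void *result_buf, size_t result_len)
{
    size_t i;

    for (i = 0; i < n; i++)
        dst[i] = data[i];
    dst[n].buf = result_buf;
    dst[n].len = result_len;
    return n + 1;
}
```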
|
common.c
    174  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);  in qce_setup_regs_ahash()
    177  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {  in qce_setup_regs_ahash()
    180  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);  in qce_setup_regs_ahash()
    188  if (rctx->first_blk)  in qce_setup_regs_ahash()
    193  iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;  in qce_setup_regs_ahash()
    196  if (rctx->first_blk)  in qce_setup_regs_ahash()
    204  if (rctx->last_blk)  in qce_setup_regs_ahash()
    209  if (rctx->first_blk)  in qce_setup_regs_ahash()
    513  totallen = rctx->cryptlen + rctx->assoclen;  in qce_setup_regs_aead()
    516  if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))  in qce_setup_regs_aead()
    [all …]
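
A plain-C equivalent of what qce_cpu_to_be32p_array() is doing at line 180: repacking key bytes into big-endian 32-bit words before they are written into the engine's key registers (the kernel helper's exact signature differs):

```c
#include <stddef.h>
#include <stdint.h>

/* Repack a byte string into big-endian u32 register words,
 * independent of host endianness. */
static void cpu_to_be32_array(uint32_t *dst, const uint8_t *src, size_t bytes)
{
    size_t i;

    for (i = 0; i < bytes / 4; i++)
        dst[i] = ((uint32_t)src[4 * i + 0] << 24) |
                 ((uint32_t)src[4 * i + 1] << 16) |
                 ((uint32_t)src[4 * i + 2] <<  8) |
                  (uint32_t)src[4 * i + 3];
}
```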
|
/linux/drivers/crypto/ccp/

ccp-crypto-aes-cmac.c
    38   scatterwalk_map_and_copy(rctx->buf, rctx->src,  in ccp_aes_cmac_complete()
    40   rctx->buf_count = rctx->hash_rem;  in ccp_aes_cmac_complete()
    90   rctx->hash_cnt = len - rctx->hash_rem;  in ccp_do_cmac_update()
    102  sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv));  in ccp_do_cmac_update()
    116  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_cmac_update()
    137  memset(rctx->pad, 0, sizeof(rctx->pad));  in ccp_do_cmac_update()
    156  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_do_cmac_update()
    164  rctx->cmd.u.aes.iv = &rctx->iv_sg;  in ccp_do_cmac_update()
    167  rctx->cmd.u.aes.src_len = rctx->hash_cnt;  in ccp_do_cmac_update()
    187  memset(rctx, 0, sizeof(*rctx));  in ccp_aes_cmac_init()
    [all …]
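
The hash_cnt / hash_rem pair implements the usual mid-stream split: only whole AES blocks go to the engine, and ccp_aes_cmac_complete() copies the remainder back into rctx->buf for the next pass (final requests flush everything, padded via rctx->pad if needed). A sketch of the split, assuming that block-multiple policy:

```c
#include <stddef.h>

#define AES_BLK 16

/* Mid-stream: keep the tail staged for later; final: send it all. */
static void cmac_split(size_t buffered, size_t nbytes, int final,
                       size_t *hash_cnt, size_t *hash_rem)
{
    size_t len = buffered + nbytes;

    *hash_rem = final ? 0 : len % AES_BLK;
    *hash_cnt = len - *hash_rem;
}
```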
|
ccp-crypto-sha.c
    43   rctx->buf_count = rctx->hash_rem;  in ccp_sha_complete()
    87   rctx->hash_cnt = len - rctx->hash_rem;  in ccp_do_sha_update()
    95   sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));  in ccp_do_sha_update()
    109  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_sha_update()
    124  sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);  in ccp_do_sha_update()
    136  rctx->cmd.u.sha.type = rctx->type;  in ccp_do_sha_update()
    137  rctx->cmd.u.sha.ctx = &rctx->ctx_sg;  in ccp_do_sha_update()
    166  rctx->cmd.u.sha.first = rctx->first;  in ccp_do_sha_update()
    167  rctx->cmd.u.sha.final = rctx->final;  in ccp_do_sha_update()
    192  memset(rctx, 0, sizeof(*rctx));  in ccp_sha_init()
    [all …]
|
ccp-crypto-aes-xts.c
    70   memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_complete()
    165  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_xts_crypt()
    167  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_xts_crypt()
    168  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_xts_crypt()
    170  rctx->cmd.u.xts.type = CCP_AES_TYPE_128;  in ccp_aes_xts_crypt()
    173  rctx->cmd.u.xts.unit_size = unit_size;  in ccp_aes_xts_crypt()
    176  rctx->cmd.u.xts.iv = &rctx->iv_sg;  in ccp_aes_xts_crypt()
    177  rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;  in ccp_aes_xts_crypt()
    178  rctx->cmd.u.xts.src = req->src;  in ccp_aes_xts_crypt()
    179  rctx->cmd.u.xts.src_len = req->cryptlen;  in ccp_aes_xts_crypt()
    [all …]
|
ccp-crypto-aes-galois.c
    107  rctx->iv[i + GCM_AES_IV_SIZE] = 0;  in ccp_aes_gcm_crypt()
    108  rctx->iv[AES_BLOCK_SIZE - 1] = 1;  in ccp_aes_gcm_crypt()
    111  iv_sg = &rctx->iv_sg;  in ccp_aes_gcm_crypt()
    116  memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_gcm_crypt()
    117  INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_aes_gcm_crypt()
    118  rctx->cmd.engine = CCP_ENGINE_AES;  in ccp_aes_gcm_crypt()
    122  rctx->cmd.u.aes.action = encrypt;  in ccp_aes_gcm_crypt()
    125  rctx->cmd.u.aes.iv = iv_sg;  in ccp_aes_gcm_crypt()
    126  rctx->cmd.u.aes.iv_len = iv_len;  in ccp_aes_gcm_crypt()
    127  rctx->cmd.u.aes.src = req->src;  in ccp_aes_gcm_crypt()
    [all …]
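
Lines 107-108 are GCM's pre-counter block construction for the common 96-bit IV case (NIST SP 800-38D's J0 = IV || 0x00000001); non-96-bit IVs would be GHASHed instead. The same layout, self-contained:

```c
#include <stdint.h>
#include <string.h>

#define GCM_AES_IV_SIZE 12
#define AES_BLOCK_SIZE  16

/* J0 for a 96-bit IV: copy the IV, zero-pad, set the counter to 1. */
static void gcm_build_j0(uint8_t j0[AES_BLOCK_SIZE],
                         const uint8_t iv[GCM_AES_IV_SIZE])
{
    memcpy(j0, iv, GCM_AES_IV_SIZE);
    memset(j0 + GCM_AES_IV_SIZE, 0, AES_BLOCK_SIZE - GCM_AES_IV_SIZE - 1);
    j0[AES_BLOCK_SIZE - 1] = 1;
}
```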
|
ccp-crypto-aes.c
    87   iv_sg = &rctx->iv_sg;  in ccp_aes_crypt()
    92   memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_aes_crypt()
    97   rctx->cmd.u.aes.action =  in ccp_aes_crypt()
    101  rctx->cmd.u.aes.iv = iv_sg;  in ccp_aes_crypt()
    102  rctx->cmd.u.aes.iv_len = iv_len;  in ccp_aes_crypt()
    103  rctx->cmd.u.aes.src = req->src;  in ccp_aes_crypt()
    105  rctx->cmd.u.aes.dst = req->dst;  in ccp_aes_crypt()
    141  req->iv = rctx->rfc3686_info;  in ccp_aes_rfc3686_complete()
    168  iv = rctx->rfc3686_iv;  in ccp_aes_rfc3686_crypt()
    178  rctx->rfc3686_info = req->iv;  in ccp_aes_rfc3686_crypt()
    [all …]
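
The rfc3686 paths splice a key-derived nonce, the caller's 8-byte IV, and an initial block counter of 1 into one AES-CTR counter block. A sketch per RFC 3686 (the macro names mirror the kernel's include/crypto/ctr.h conventions):

```c
#include <stdint.h>
#include <string.h>

#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE    8

/* Counter block: nonce (taken from the tail of the key material at
 * setkey time) || per-request IV || 32-bit big-endian counter = 1. */
static void rfc3686_build_ctrblk(uint8_t ctrblk[16],
                                 const uint8_t nonce[CTR_RFC3686_NONCE_SIZE],
                                 const uint8_t iv[CTR_RFC3686_IV_SIZE])
{
    memcpy(ctrblk, nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(ctrblk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
    ctrblk[12] = 0; ctrblk[13] = 0; ctrblk[14] = 0; ctrblk[15] = 1;
}
```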
|
ccp-crypto-des3.c
    82   iv_sg = &rctx->iv_sg;  in ccp_des3_crypt()
    84   sg_init_one(iv_sg, rctx->iv, iv_len);  in ccp_des3_crypt()
    87   memset(&rctx->cmd, 0, sizeof(rctx->cmd));  in ccp_des3_crypt()
    88   INIT_LIST_HEAD(&rctx->cmd.entry);  in ccp_des3_crypt()
    89   rctx->cmd.engine = CCP_ENGINE_DES3;  in ccp_des3_crypt()
    90   rctx->cmd.u.des3.type = ctx->u.des3.type;  in ccp_des3_crypt()
    92   rctx->cmd.u.des3.action = (encrypt)  in ccp_des3_crypt()
    97   rctx->cmd.u.des3.iv = iv_sg;  in ccp_des3_crypt()
    98   rctx->cmd.u.des3.iv_len = iv_len;  in ccp_des3_crypt()
    99   rctx->cmd.u.des3.src = req->src;  in ccp_des3_crypt()
    [all …]
|
/linux/drivers/crypto/cavium/nitrox/

nitrox_aead.c
    166  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
    167  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);  in nitrox_set_creq()
    175  ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,  in nitrox_set_creq()
    180  ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,  in nitrox_set_creq()
    230  rctx->dstlen = rctx->srclen + aead->authsize;  in nitrox_aes_gcm_enc()
    235  rctx->src = areq->src;  in nitrox_aes_gcm_enc()
    236  rctx->dst = areq->dst;  in nitrox_aes_gcm_enc()
    264  rctx->dstlen = rctx->srclen - aead->authsize;  in nitrox_aes_gcm_dec()
    405  sg_set_buf(rctx->src, rctx->assoc, assoclen);  in nitrox_rfc4106_set_aead_rctx_sglist()
    412  sg_set_buf(rctx->dst, rctx->assoc, assoclen);  in nitrox_rfc4106_set_aead_rctx_sglist()
    [all …]
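
The nitrox entries are mostly length bookkeeping: lines 166-167 pack cryptlen + assoclen and ivsize + assoclen into big-endian 16-bit header fields, and lines 230/264 size the destination by the tag. The latter rule in one hypothetical helper:

```c
#include <stddef.h>

/* The ciphertext carries the tag: the destination grows by authsize
 * on encrypt and shrinks by it on decrypt. */
static size_t gcm_dst_len(size_t srclen, size_t authsize, int encrypt)
{
    return encrypt ? srclen + authsize : srclen - authsize;
}
```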
|
/linux/drivers/crypto/bcm/

cipher.c
    345   rctx->total_sent = rctx->src_sent;  in handle_skcipher_req()
    348   rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);  in handle_skcipher_req()
    349   rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);  in handle_skcipher_req()
    696   rctx->total_todo, rctx->total_sent);  in handle_ahash_req()
    769   rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip,  in handle_ahash_req()
    787   if ((rctx->total_sent == rctx->total_todo) && rctx->is_final)  in handle_ahash_req()
    1013  if (rctx->is_final && (rctx->total_sent == rctx->total_todo))  in handle_ahash_resp()
    2682  rctx->dst_sg = rctx->src_sg;  in aead_enqueue()
    2683  rctx->dst_skip = rctx->src_skip;  in aead_enqueue()
    2716  rctx->src_sg, rctx->src_skip);  in aead_enqueue()
    [all …]
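
handle_skcipher_req() processes large requests in chunks, so spu_sg_count() (lines 348-349) has to report how many scatterlist entries cover a given byte window after a skip. A userspace model of that walk, with struct seg standing in for struct scatterlist:

```c
#include <stddef.h>

struct seg { size_t len; };

/* Count the entries needed to cover `nbytes` starting `skip` bytes
 * into the list; entries that fall entirely inside the skip are not
 * counted. */
static unsigned int sg_count(const struct seg *sg, size_t nsegs,
                             size_t skip, size_t nbytes)
{
    unsigned int n = 0;
    size_t i;

    for (i = 0; i < nsegs && nbytes; i++) {
        size_t avail = sg[i].len;

        if (skip >= avail) {
            skip -= avail;
            continue;
        }
        avail -= skip;
        skip = 0;
        nbytes -= (nbytes < avail) ? nbytes : avail;
        n++;
    }
    return n;
}
```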
|
/linux/drivers/crypto/allwinner/sun8i-ss/

sun8i-ss-hash.c
    203  int flow = rctx->flow;  in sun8i_ss_run_hash_task()
    217  v |= rctx->method;  in sun8i_ss_run_hash_task()
    220  if (!rctx->t_dst[i].addr)  in sun8i_ss_run_hash_task()
    233  rctx->t_src[i].len, rctx->t_dst[i].len,  in sun8i_ss_run_hash_task()
    234  rctx->method, rctx->t_src[i].addr, rctx->t_dst[i].addr);  in sun8i_ss_run_hash_task()
    309  rctx->flow = e;  in sun8i_ss_hash_digest()
    357  rctx->t_dst[i].addr = 0;  in sun8i_ss_hash_run()
    358  rctx->t_dst[i].len = 0;  in sun8i_ss_hash_run()
    385  rctx->t_src[i].len = todo / 4;  in sun8i_ss_hash_run()
    424  rctx->t_src[i].addr = addr_pad;  in sun8i_ss_hash_run()
    [all …]
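
The task descriptor lengths at line 385 are in 32-bit words (todo / 4), and addr_pad (line 424) points at a driver-built MD5/SHA-style padding block. A self-contained sketch of that padding layout (the driver's buffer and word-length bookkeeping are omitted):

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Classic 64-byte-block padding: 0x80, zeroes to 56 mod 64, then the
 * total length in bits (big-endian for SHA, little-endian for MD5).
 * `pad` must have room for up to 72 bytes.  Returns the pad length. */
static size_t build_md_pad(uint8_t *pad, uint64_t total_bytes, int big_endian)
{
    uint64_t bits = total_bytes * 8;
    size_t rem = total_bytes % 64;
    size_t padlen = (rem < 56) ? 56 - rem : 120 - rem;
    size_t i;

    pad[0] = 0x80;
    memset(pad + 1, 0, padlen - 1);
    for (i = 0; i < 8; i++)
        pad[padlen + (big_endian ? 7 - i : i)] = (uint8_t)(bits >> (8 * i));
    return padlen + 8;
}
```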
|
sun8i-ss-cipher.c
    125  rctx->keylen = op->keylen;  in sun8i_ss_cipher()
    136  rctx->ivlen = ivsize;  in sun8i_ss_cipher()
    138  if (!rctx->biv) {  in sun8i_ss_cipher()
    153  rctx->p_iv = dma_map_single(ss->dev, rctx->biv, rctx->ivlen,  in sun8i_ss_cipher()
    245  if (rctx->p_iv)  in sun8i_ss_cipher()
    246  dma_unmap_single(ss->dev, rctx->p_iv, rctx->ivlen,  in sun8i_ss_cipher()
    250  if (rctx->biv) {  in sun8i_ss_cipher()
    259  kfree(rctx->biv);  in sun8i_ss_cipher()
    291  rctx->op_dir = SS_DECRYPTION;  in sun8i_ss_skdecrypt()
    298  rctx->flow = e;  in sun8i_ss_skdecrypt()
    [all …]
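
rctx->biv is a kmalloc'd bounce buffer: it gives dma_map_single() (line 153) a safely mappable copy of the IV, and for CBC decryption it also preserves the last ciphertext block, which is the next chained IV and is about to be overwritten by an in-place operation. A plain-C model of that save step:

```c
#include <stdlib.h>
#include <string.h>

/* Before an in-place CBC decrypt, save the last ciphertext block:
 * it becomes the IV for the next chained request.  The caller copies
 * it back into req->iv afterwards and frees the bounce buffer. */
static unsigned char *save_next_iv(const unsigned char *ct, size_t len,
                                   size_t ivsize)
{
    unsigned char *biv = malloc(ivsize);

    if (biv)
        memcpy(biv, ct + len - ivsize, ivsize);
    return biv;
}
```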
|
/linux/drivers/crypto/stm32/

stm32-hash.c
    301  while ((rctx->bufcnt < rctx->buflen) && rctx->total) {  in stm32_hash_append_sg()
    302  count = min(rctx->sg->length - rctx->offset, rctx->total);  in stm32_hash_append_sg()
    307  rctx->sg = sg_next(rctx->sg);  in stm32_hash_append_sg()
    314  scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,  in stm32_hash_append_sg()
    322  rctx->sg = sg_next(rctx->sg);  in stm32_hash_append_sg()
    390  (rctx->bufcnt + rctx->total >= rctx->buflen)) {  in stm32_hash_update_cpu()
    554  rctx->nents = sg_nents(rctx->sg);  in stm32_hash_dma_send()
    576  rctx->sg, rctx->nents,  in stm32_hash_dma_send()
    909  if ((rctx->bufcnt + rctx->total < rctx->buflen)) {  in stm32_hash_update()
    985  memcpy(out, rctx, sizeof(*rctx));  in stm32_hash_export()
    [all …]
|
/linux/crypto/

chacha20poly1305.c
    98   u8 tag[sizeof(rctx->tag)];  in poly_verify_tag()
    114  sizeof(rctx->tag), 1);  in poly_copy_tag()
    131  if (rctx->cryptlen == 0)  in chacha_decrypt()
    145  rctx->cryptlen, creq->iv);  in chacha_decrypt()
    318  sg_init_one(preq->src, rctx->key, sizeof(rctx->key));  in poly_setkey()
    368  rctx->assoclen = req->assoclen;  in poly_genkey()
    371  if (rctx->assoclen < 8)  in poly_genkey()
    373  rctx->assoclen -= 8;  in poly_genkey()
    376  memset(rctx->key, 0, sizeof(rctx->key));  in poly_genkey()
    377  sg_init_one(creq->src, rctx->key, sizeof(rctx->key));  in poly_genkey()
    [all …]
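
The assoclen -= 8 at line 373 is the rfc7539esp convention: the trailing 8 AAD bytes are treated as part of the nonce rather than as AAD. And poly_genkey() derives the one-time Poly1305 key by encrypting 32 zero bytes at ChaCha20 counter 0 (lines 376-377 zero the buffer that becomes the key). A self-contained sketch of that derivation, independent of the kernel's chacha library:

```c
#include <stdint.h>
#include <string.h>

static uint32_t rotl32(uint32_t x, int n)
{
    return (x << n) | (x >> (32 - n));
}

static uint32_t le32(const uint8_t *p)
{
    return p[0] | ((uint32_t)p[1] << 8) |
           ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

#define QR(a, b, c, d)                      \
    (a += b, d ^= a, d = rotl32(d, 16),     \
     c += d, b ^= c, b = rotl32(b, 12),     \
     a += b, d ^= a, d = rotl32(d, 8),      \
     c += d, b ^= c, b = rotl32(b, 7))

/* One ChaCha20 block (RFC 7539 state layout, 96-bit nonce). */
static void chacha20_block(const uint8_t key[32], const uint8_t nonce[12],
                           uint32_t counter, uint8_t out[64])
{
    static const char sigma[] = "expand 32-byte k";
    uint32_t x[16], s[16];
    int i;

    for (i = 0; i < 4; i++)
        s[i] = le32((const uint8_t *)sigma + 4 * i);
    for (i = 0; i < 8; i++)
        s[4 + i] = le32(key + 4 * i);
    s[12] = counter;
    for (i = 0; i < 3; i++)
        s[13 + i] = le32(nonce + 4 * i);

    memcpy(x, s, sizeof(x));
    for (i = 0; i < 10; i++) {      /* 20 rounds = 10 double rounds */
        QR(x[0], x[4], x[8],  x[12]);
        QR(x[1], x[5], x[9],  x[13]);
        QR(x[2], x[6], x[10], x[14]);
        QR(x[3], x[7], x[11], x[15]);
        QR(x[0], x[5], x[10], x[15]);
        QR(x[1], x[6], x[11], x[12]);
        QR(x[2], x[7], x[8],  x[13]);
        QR(x[3], x[4], x[9],  x[14]);
    }
    for (i = 0; i < 16; i++) {
        uint32_t v = x[i] + s[i];

        out[4 * i + 0] = (uint8_t)v;
        out[4 * i + 1] = (uint8_t)(v >> 8);
        out[4 * i + 2] = (uint8_t)(v >> 16);
        out[4 * i + 3] = (uint8_t)(v >> 24);
    }
}

/* RFC 7539: the one-time Poly1305 key is the first 32 bytes of the
 * keystream block at counter 0; payload encryption starts at 1. */
static void poly1305_genkey(uint8_t polykey[32],
                            const uint8_t key[32], const uint8_t nonce[12])
{
    uint8_t block0[64];

    chacha20_block(key, nonce, 0, block0);
    memcpy(polykey, block0, 32);
}
```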
|
xts.c
    90   le128 t = rctx->t;  in xts_xor_tweak()
    94   req = &rctx->subreq;  in xts_xor_tweak()
    113  rctx->t = t;  in xts_xor_tweak()
    118  gf128mul_x_ble(&rctx->t, &t);  in xts_xor_tweak()
    152  le128_xor(&b, &rctx->t, &b);  in xts_cts_done()
    166  struct skcipher_request *subreq = &rctx->subreq;  in xts_cts_final()
    171  rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,  in xts_cts_final()
    178  le128_xor(b, &rctx->t, b);  in xts_cts_final()
    185  skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,  in xts_cts_final()
    193  le128_xor(b, &rctx->t, b);  in xts_cts_final()
    [all …]
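
gf128mul_x_ble() (line 118) advances the XTS tweak by multiplying it by x in GF(2^128), little-endian block convention, reduction constant 0x87. The same step in plain C, with the tweak as two 64-bit halves, t[0] low:

```c
#include <stdint.h>

/* Double the tweak as a 128-bit little-endian value; a carry out of
 * bit 127 folds back in as 0x87 (x^128 = x^7 + x^2 + x + 1). */
static void gf128_x_ble(uint64_t t[2])
{
    uint64_t carry = t[1] >> 63;

    t[1] = (t[1] << 1) | (t[0] >> 63);
    t[0] = (t[0] << 1) ^ (carry * 0x87);
}
```

xts_xor_tweak() runs this once per 16-byte block, XORing the tweak in before and after the inner ECB pass.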
|
/linux/drivers/crypto/allwinner/sun8i-ce/

sun8i-ce-cipher.c
    109  flow = rctx->flow;  in sun8i_ce_cipher_prepare()
    153  rctx->ivlen = ivsize;  in sun8i_ce_cipher_prepare()
    170  rctx->addr_iv = dma_map_single(ce->dev, rctx->bounce_iv, rctx->ivlen,  in sun8i_ce_cipher_prepare()
    237  rctx->nr_sgs = nr_sgs;  in sun8i_ce_cipher_prepare()
    255  if (rctx->addr_iv)  in sun8i_ce_cipher_prepare()
    256  dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);  in sun8i_ce_cipher_prepare()
    284  flow = rctx->flow;  in sun8i_ce_cipher_run()
    304  flow = rctx->flow;  in sun8i_ce_cipher_unprepare()
    319  dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);  in sun8i_ce_cipher_unprepare()
    349  rctx->flow = e;  in sun8i_ce_skdecrypt()
    [all …]
|
sun8i-ce-hash.c
    124  rctx->fallback_req.result = areq->result;  in sun8i_ce_hash_final()
    143  rctx->fallback_req.nbytes = areq->nbytes;  in sun8i_ce_hash_update()
    144  rctx->fallback_req.src = areq->src;  in sun8i_ce_hash_update()
    163  rctx->fallback_req.nbytes = areq->nbytes;  in sun8i_ce_hash_finup()
    164  rctx->fallback_req.src = areq->src;  in sun8i_ce_hash_finup()
    165  rctx->fallback_req.result = areq->result;  in sun8i_ce_hash_finup()
    188  rctx->fallback_req.nbytes = areq->nbytes;  in sun8i_ce_hash_digest_fb()
    189  rctx->fallback_req.src = areq->src;  in sun8i_ce_hash_digest_fb()
    190  rctx->fallback_req.result = areq->result;  in sun8i_ce_hash_digest_fb()
    244  rctx->flow = e;  in sun8i_ce_hash_digest()
    [all …]
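
sun8i_ce_hash_update()/_finup()/_digest_fb() all follow the same fallback pattern: mirror the caller's request fields into rctx->fallback_req, point it at a software tfm, and delegate. A condensed kernel-style sketch (the tfm/request context struct names are hypothetical; this only compiles in-kernel):

```c
static int hash_fallback_update(struct ahash_request *areq)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
    struct my_tfm_ctx *ctx = crypto_ahash_ctx(tfm);      /* hypothetical */
    struct my_req_ctx *rctx = ahash_request_ctx(areq);   /* hypothetical */

    /* Run the sub-request on the software implementation, preserving
     * only the MAY_SLEEP flag from the caller. */
    ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    rctx->fallback_req.base.flags = areq->base.flags &
                                    CRYPTO_TFM_REQ_MAY_SLEEP;
    rctx->fallback_req.nbytes = areq->nbytes;
    rctx->fallback_req.src = areq->src;

    return crypto_ahash_update(&rctx->fallback_req);
}
```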
|
/linux/drivers/crypto/

sahara.c
    773   if (rctx->last)  in sahara_sha_init_hdr()
    937   memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt);  in sahara_sha_prepare_request()
    956   sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt);  in sahara_sha_prepare_request()
    960   rctx->total = req->nbytes + rctx->buf_cnt;  in sahara_sha_prepare_request()
    961   rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
    969   rctx->in_sg = rctx->in_sg_chain;  in sahara_sha_prepare_request()
    971   sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);  in sahara_sha_prepare_request()
    972   rctx->total = rctx->buf_cnt;  in sahara_sha_prepare_request()
    1031  memcpy(req->result, rctx->context, rctx->digest_size);  in sahara_sha_process()
    1109  memset(rctx, 0, sizeof(*rctx));  in sahara_sha_init()
    [all …]
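
Lines 956-972 show the two shapes of the input list: data still in req->src gets the staged tail prepended via a chained scatterlist, while a tail-only pass uses sg_init_one(). The chaining step, sketched with the kernel's scatterlist helpers (in-kernel only):

```c
#include <linux/scatterlist.h>

/* Prepend `rembuf` (buf_cnt staged bytes) to the caller's list:
 * entry 0 carries the staging buffer and entry 1 becomes the chain
 * link, so walking `chain` yields rembuf followed by req->src. */
static void prepend_rembuf(struct scatterlist chain[2], void *rembuf,
                           unsigned int buf_cnt, struct scatterlist *req_src)
{
    sg_init_table(chain, 2);
    sg_set_buf(chain, rembuf, buf_cnt);
    sg_chain(chain, 2, req_src);
}
```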
|
omap-aes-gcm.c
    46   struct omap_aes_reqctx *rctx;  in omap_aes_gcm_done_task()  local
    77   tag = (u8 *)rctx->auth_tag;  in omap_aes_gcm_done_task()
    185  struct omap_aes_reqctx *rctx;  in omap_aes_gcm_dma_out_callback()  local
    195  auth_tag = (u32 *)rctx->auth_tag;  in omap_aes_gcm_dma_out_callback()
    226  rctx->mode &= FLAGS_MODE_MASK;  in omap_aes_gcm_prepare_req()
    247  memset(rctx->auth_tag, 0, sizeof(rctx->auth_tag));  in omap_aes_gcm_crypt()
    250  err = do_encrypt_iv(req, (u32 *)rctx->auth_tag, (u32 *)rctx->iv);  in omap_aes_gcm_crypt()
    264  dd = omap_aes_find_dev(rctx);  in omap_aes_gcm_crypt()
    267  rctx->mode = mode;  in omap_aes_gcm_crypt()
    294  memcpy(rctx->iv + 4, req->iv, 8);  in omap_aes_4106gcm_encrypt()
    [all …]
|
/linux/drivers/crypto/rockchip/

rk3288_crypto_ahash.c
    102  rctx->fallback_req.nbytes = req->nbytes;  in rk_ahash_update()
    103  rctx->fallback_req.src = req->src;  in rk_ahash_update()
    117  rctx->fallback_req.result = req->result;  in rk_ahash_final()
    132  rctx->fallback_req.nbytes = req->nbytes;  in rk_ahash_finup()
    133  rctx->fallback_req.src = req->src;  in rk_ahash_finup()
    198  struct rk_ahash_rctx *rctx;  in rk_ahash_start()  local
    208  rctx = ahash_request_ctx(req);  in rk_ahash_start()
    209  rctx->mode = 0;  in rk_ahash_start()
    214  rctx->mode = RK_CRYPTO_HASH_SHA1;  in rk_ahash_start()
    217  rctx->mode = RK_CRYPTO_HASH_SHA256;  in rk_ahash_start()
    [all …]
|
/linux/drivers/crypto/gemini/

sl3516-ce-cipher.c
    183  rctx->t_src[i].len = todo;  in sl3516_ce_cipher()
    205  rctx->t_dst[i].len = todo;  in sl3516_ce_cipher()
    226  rctx->tqflag |= TQ1_CIPHER;  in sl3516_ce_cipher()
    232  rctx->h = &ecb->cipher;  in sl3516_ce_cipher()
    234  rctx->tqflag |= TQ4_KEY0;  in sl3516_ce_cipher()
    235  rctx->tqflag |= TQ5_KEY4;  in sl3516_ce_cipher()
    236  rctx->tqflag |= TQ6_KEY6;  in sl3516_ce_cipher()
    241  rctx->nr_sgs = nr_sgs;  in sl3516_ce_cipher()
    242  rctx->nr_sgd = nr_sgd;  in sl3516_ce_cipher()
    280  rctx->op_dir = CE_DECRYPTION;  in sl3516_ce_skdecrypt()
    [all …]
|