Lines Matching refs:sreq — references to struct safexcel_cipher_req *sreq in the Inside Secure SafeXcel cipher driver (drivers/crypto/inside-secure/safexcel_cipher.c in the Linux kernel).
508 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
530 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
545 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
550 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
560 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
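The matches at lines 508-560 all sit in safexcel_context_control(), where sreq->direction steers the per-request context setup: one function programs the context for both encrypt and decrypt, and only this flag decides which engine opcode and mode-specific settings are written into the command descriptor. A minimal model of that pattern, with illustrative names and values rather than the driver's exact CONTEXT_CONTROL_* macros:

    /* Sketch: direction-dependent context-control setup. The names and
     * the two opcode values below are illustrative, not the driver's. */
    enum cipher_direction { ENCRYPT, DECRYPT };

    struct cipher_req {
            enum cipher_direction direction;
    };

    static unsigned int context_control_word(const struct cipher_req *sreq)
    {
            /* Encrypt runs the engine "outbound", decrypt "inbound". */
            return sreq->direction == ENCRYPT ? 0x6 /* crypto-out */
                                              : 0x5 /* crypto-in */;
    }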
613 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
624 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
627 while (sreq->rdescs--) { in safexcel_handle_req_result()
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_handle_req_result()
647 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_handle_req_result()
648 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_handle_req_result()
655 (sreq->direction == SAFEXCEL_ENCRYPT)) { in safexcel_handle_req_result()
657 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, in safexcel_handle_req_result()
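Lines 613-657 are the completion path, safexcel_handle_req_result(): a callback that recorded no result descriptors is ignored, each recorded descriptor is consumed, the scatterlists are unmapped (one bidirectional unmap when source and destination were the same list, separate to-device/from-device unmaps otherwise), and on encryption the last ciphertext block is copied back into the request IV so a chained CBC call sees the correct output IV. A compact sketch of the unmap-and-IV tail, assuming the kernel DMA/scatterlist APIs and parameters flattened out of the driver's structs:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Sketch only; parameters are flattened from the driver's structs. */
    static void finish_request(struct device *dev, struct scatterlist *src,
                               struct scatterlist *dst, int nr_src, int nr_dst,
                               unsigned int cryptlen, unsigned int ivsize,
                               bool encrypt, u8 *iv)
    {
            if (src == dst) {
                    /* In-place request: one mapping served both directions. */
                    dma_unmap_sg(dev, src, nr_src, DMA_BIDIRECTIONAL);
            } else {
                    dma_unmap_sg(dev, src, nr_src, DMA_TO_DEVICE);
                    dma_unmap_sg(dev, dst, nr_dst, DMA_FROM_DEVICE);
            }

            /* CBC chaining: the last ciphertext block becomes the next
             * request's input IV, so copy it out after encryption. */
            if (encrypt)
                    sg_pcopy_to_buffer(dst, nr_dst, iv, ivsize,
                                       cryptlen - ivsize);
    }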
669 struct safexcel_cipher_req *sreq, in safexcel_send_req() argument
691 sreq->nr_src = sg_nents_for_len(src, totlen_src); in safexcel_send_req()
698 if (sreq->direction == SAFEXCEL_DECRYPT) in safexcel_send_req()
710 (sreq->direction == SAFEXCEL_DECRYPT)) { in safexcel_send_req()
716 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, in safexcel_send_req()
722 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst); in safexcel_send_req()
732 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst); in safexcel_send_req()
733 sreq->nr_dst = sreq->nr_src; in safexcel_send_req()
735 (sreq->nr_src <= 0))) { in safexcel_send_req()
740 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_send_req()
742 if (unlikely(totlen_src && (sreq->nr_src <= 0))) { in safexcel_send_req()
747 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_send_req()
749 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) { in safexcel_send_req()
752 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
756 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_send_req()
781 for_each_sg(src, sg, sreq->nr_src, i) { in safexcel_send_req()
808 safexcel_context_control(ctx, base, sreq, first_cdesc); in safexcel_send_req()
811 sreq->direction, cryptlen, in safexcel_send_req()
818 for_each_sg(dst, sg, sreq->nr_dst, i) { in safexcel_send_req()
819 bool last = (i == sreq->nr_dst - 1); in safexcel_send_req()
888 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_send_req()
890 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_send_req()
891 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_send_req()
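The bulk of the matches (lines 669-891) sit in safexcel_send_req() and outline the whole submission sequence: sg_nents_for_len() sizes both scatterlists; before an in-place CBC decrypt, the tail of the ciphertext is saved into the request IV (lines 698-716), the mirror image of the output-IV copy on completion; for in-place operation nr_src and nr_dst are forced equal and one bidirectional mapping is used (lines 732-740); otherwise source and destination are mapped separately, and the error paths (lines 749-752, 888-891) unmap whatever was already mapped. The sketch below covers just the mapping strategy, assuming the kernel DMA/scatterlist APIs, with illustrative error codes:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/minmax.h>
    #include <linux/scatterlist.h>

    /* Sketch of the in-place vs. out-of-place mapping choice; zero-length
     * lists are tolerated, matching the totlen_src/totlen_dst checks. */
    static int map_src_dst(struct device *dev, struct scatterlist *src,
                           struct scatterlist *dst, int *nr_src, int *nr_dst,
                           unsigned int totlen_src, unsigned int totlen_dst)
    {
            *nr_src = sg_nents_for_len(src, totlen_src);
            *nr_dst = sg_nents_for_len(dst, totlen_dst);

            if (src == dst) {
                    /* In-place: one list, mapped for both directions. */
                    *nr_src = max(*nr_src, *nr_dst);
                    *nr_dst = *nr_src;
                    if (totlen_src && *nr_src <= 0)
                            return -EINVAL;
                    if (*nr_src > 0 &&
                        !dma_map_sg(dev, src, *nr_src, DMA_BIDIRECTIONAL))
                            return -EIO;
                    return 0;
            }

            if (totlen_src && *nr_src <= 0)
                    return -EINVAL;
            if (*nr_src > 0 && !dma_map_sg(dev, src, *nr_src, DMA_TO_DEVICE))
                    return -EIO;

            if (totlen_dst && *nr_dst <= 0) {
                    /* Unwind the source mapping before failing. */
                    if (*nr_src > 0)
                            dma_unmap_sg(dev, src, *nr_src, DMA_TO_DEVICE);
                    return -EINVAL;
            }
            if (*nr_dst > 0 &&
                !dma_map_sg(dev, dst, *nr_dst, DMA_FROM_DEVICE)) {
                    if (*nr_src > 0)
                            dma_unmap_sg(dev, src, *nr_src, DMA_TO_DEVICE);
                    return -EIO;
            }
            return 0;
    }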
900 struct safexcel_cipher_req *sreq, in safexcel_handle_inv_result() argument
909 if (unlikely(!sreq->rdescs)) in safexcel_handle_inv_result()
912 while (sreq->rdescs--) { in safexcel_handle_inv_result()
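safexcel_handle_inv_result() at lines 900-912 opens with the same two-line idiom as the data-path handler at lines 624-627: treat a completion with no recorded result descriptors as spurious, then consume exactly sreq->rdescs descriptors. Extracted into a stand-alone helper (the descriptor-consuming call is a hypothetical stand-in):

    #include <linux/compiler.h>
    #include <linux/types.h>

    struct cipher_req { int rdescs; };

    /* Hypothetical stand-in for the driver's result-descriptor dequeue. */
    void consume_result_descriptor(void *priv, int ring);

    static bool drain_results(void *priv, int ring, struct cipher_req *sreq)
    {
            if (unlikely(!sreq->rdescs))
                    return false;   /* spurious completion, nothing queued */

            while (sreq->rdescs--)
                    consume_result_descriptor(priv, ring);

            return true;
    }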
962 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_handle_result() local
965 if (sreq->needs_inv) { in safexcel_skcipher_handle_result()
966 sreq->needs_inv = false; in safexcel_skcipher_handle_result()
967 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_skcipher_handle_result()
971 req->dst, req->cryptlen, sreq, in safexcel_skcipher_handle_result()
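Lines 962-971 show how the skcipher result callback demultiplexes on sreq->needs_inv: an invalidation completion clears the flag and is routed to safexcel_handle_inv_result(), while a normal completion goes to safexcel_handle_req_result() with the request's src/dst/cryptlen. The branch, reduced to a sketch with shortened names and stub prototypes:

    #include <linux/types.h>

    struct engine;
    struct crypto_async_request;
    struct cipher_req { bool needs_inv; };

    int handle_inv_result(struct engine *priv, int ring,
                          struct crypto_async_request *async,
                          struct cipher_req *sreq, bool *complete, int *ret);
    int handle_req_result(struct engine *priv, int ring,
                          struct crypto_async_request *async,
                          struct cipher_req *sreq, bool *complete, int *ret);

    static int handle_result(struct engine *priv, int ring,
                             struct crypto_async_request *async,
                             struct cipher_req *sreq, bool *complete, int *ret)
    {
            if (sreq->needs_inv) {
                    /* One-shot flag: clear it before handling so a requeued
                     * request takes the normal path next time. */
                    sreq->needs_inv = false;
                    return handle_inv_result(priv, ring, async, sreq,
                                             complete, ret);
            }
            return handle_req_result(priv, ring, async, sreq, complete, ret);
    }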
985 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_handle_result() local
988 if (sreq->needs_inv) { in safexcel_aead_handle_result()
989 sreq->needs_inv = false; in safexcel_aead_handle_result()
990 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_aead_handle_result()
996 sreq, should_complete, ret); in safexcel_aead_handle_result()
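The AEAD callback at lines 985-996 is the same branch line for line; the only difference visible in the matches is that sreq is fetched with aead_request_ctx() instead of skcipher_request_ctx(), so the sketch above stands for both.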
1024 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_send() local
1028 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_skcipher_send()
1030 if (sreq->needs_inv) { in safexcel_skcipher_send()
1042 ret = safexcel_send_req(async, ring, sreq, req->src, in safexcel_skcipher_send()
1047 sreq->rdescs = *results; in safexcel_skcipher_send()
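The send side (lines 1024-1047) is the mirror image: assert that an invalidation request can only exist when the engine has a transform record cache, route needs_inv requests to the invalidation path and everything else to safexcel_send_req(), then record in sreq->rdescs how many result descriptors the completion handler must later consume. A sketch under the same simplified names (invalidate_cache() and send_normal() are hypothetical stand-ins):

    #include <linux/bug.h>
    #include <linux/types.h>

    #define TRC_CACHE 0x1   /* stand-in for the driver's EIP197_TRC_CACHE */

    struct engine { u32 flags; };
    struct cipher_req { bool needs_inv; int rdescs; };

    int invalidate_cache(struct engine *priv, int ring);      /* hypothetical */
    int send_normal(struct engine *priv, int ring, struct cipher_req *sreq);

    static int send(struct engine *priv, int ring, struct cipher_req *sreq,
                    int *results)
    {
            int ret;

            /* An invalidation request without a record cache is a driver
             * bug: nothing cached means nothing to invalidate. */
            BUG_ON(!(priv->flags & TRC_CACHE) && sreq->needs_inv);

            if (sreq->needs_inv)
                    ret = invalidate_cache(priv, ring);
            else
                    ret = send_normal(priv, ring, sreq);

            /* The completion handlers drain exactly this many rdescs
             * (checked against sreq->rdescs at lines 624 and 909). */
            sreq->rdescs = *results;
            return ret;
    }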
1057 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_send() local
1061 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_aead_send()
1063 if (sreq->needs_inv) in safexcel_aead_send()
1066 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst, in safexcel_aead_send()
1070 sreq->rdescs = *results; in safexcel_aead_send()
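safexcel_aead_send() at lines 1057-1070 repeats this shape unchanged: same BUG_ON guard, same needs_inv branch, same rdescs bookkeeping, differing only in the request type it unpacks.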
1076 struct safexcel_cipher_req *sreq, in safexcel_cipher_exit_inv() argument
1087 sreq->needs_inv = true; in safexcel_cipher_exit_inv()
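safexcel_cipher_exit_inv() (lines 1076-1087) is where the invalidation flag is armed: on teardown, a dummy request is marked with sreq->needs_inv = true and pushed through the normal queue so the send path above takes its invalidation branch, with the caller blocking on a completion until the result handler fires. Roughly (queue_for_ring() is a hypothetical stand-in for the driver's ring enqueue):

    #include <linux/completion.h>
    #include <linux/types.h>

    struct cipher_ctx;
    struct cipher_req { bool needs_inv; };

    void queue_for_ring(struct cipher_ctx *ctx,
                        struct cipher_req *sreq);     /* hypothetical */

    static void exit_inv(struct cipher_ctx *ctx, struct cipher_req *sreq,
                         struct completion *done)
    {
            /* Arm the flag so the send path takes the invalidation
             * branch instead of the data path. */
            sreq->needs_inv = true;
            queue_for_ring(ctx, sreq);
            wait_for_completion(done);  /* signalled by the result handler */
    }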
1111 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_exit_inv() local
1120 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_skcipher_exit_inv()
1126 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_exit_inv() local
1135 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_aead_exit_inv()
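The two wrappers at lines 1111-1120 and 1126-1135 appear to add nothing sreq-specific: each fetches the request context from its own request type, sets up a locally declared result, and delegates to safexcel_cipher_exit_inv(), where the logic above lives.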
1139 struct safexcel_cipher_req *sreq, in safexcel_queue_req() argument
1146 sreq->needs_inv = false; in safexcel_queue_req()
1147 sreq->direction = dir; in safexcel_queue_req()
1151 sreq->needs_inv = true; in safexcel_queue_req()
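Finally, safexcel_queue_req() (lines 1139-1151) is the common entry point that stamps every request: needs_inv is reset, the direction is recorded for the context-control and IV logic above, and, judging by the match at line 1151, the flag is re-armed when a stale context record may still sit in the engine's cache. A sketch of that shape, with context_is_cached(), context_is_stale(), and enqueue_on_ring() as hypothetical helpers:

    #include <linux/types.h>

    enum cipher_direction { ENCRYPT, DECRYPT };

    struct cipher_ctx;
    struct cipher_req {
            bool needs_inv;
            enum cipher_direction direction;
    };

    bool context_is_cached(struct cipher_ctx *ctx);   /* hypothetical */
    bool context_is_stale(struct cipher_ctx *ctx);    /* hypothetical */
    int enqueue_on_ring(struct cipher_ctx *ctx,
                        struct cipher_req *sreq);     /* hypothetical */

    static int queue_req(struct cipher_ctx *ctx, struct cipher_req *sreq,
                         enum cipher_direction dir)
    {
            sreq->needs_inv = false;
            sreq->direction = dir;

            /* If the key changed while the engine still caches the old
             * context record, invalidate before running this request. */
            if (context_is_cached(ctx) && context_is_stale(ctx))
                    sreq->needs_inv = true;

            return enqueue_on_ring(ctx, sreq);
    }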