Lines Matching refs:sreq
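
This is a cross-reference of every use of sreq in the safexcel cipher glue; the function names match mainline drivers/crypto/inside-secure/safexcel_cipher.c. All of the fields referenced below live in the driver's per-request state. A sketch of that structure, reconstructed from the references in this listing (comments and field order are assumptions):

    /* Per-request state stored in the skcipher/aead request context. */
    enum safexcel_cipher_direction {
        SAFEXCEL_ENCRYPT,
        SAFEXCEL_DECRYPT,
    };

    struct safexcel_cipher_req {
        enum safexcel_cipher_direction direction;
        unsigned int rdescs;    /* result descriptors owed by the engine */
        bool needs_inv;         /* context record must be invalidated first */
        int nr_src, nr_dst;     /* mapped scatterlist entry counts */
    };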

507 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
529 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
544 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
549 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
559 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
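
safexcel_context_control() (lines 507-559) derives the command-descriptor control words from sreq->direction. A minimal sketch of the pattern, assuming the mainline control0 layout and CONTEXT_CONTROL_TYPE_* names (the AEAD variants combine crypto and hash, e.g. encrypt-hash-out vs. hash-decrypt-in):

    /* Plain cipher: the direction selects outbound vs. inbound crypto. */
    if (sreq->direction == SAFEXCEL_ENCRYPT)
        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;
    else
        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_IN;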
612 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
623 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
626 while (sreq->rdescs--) { in safexcel_handle_req_result()
644 if (sreq->nr_src > 0) in safexcel_handle_req_result()
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_handle_req_result()
648 if (sreq->nr_src > 0) in safexcel_handle_req_result()
649 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_handle_req_result()
651 if (sreq->nr_dst > 0) in safexcel_handle_req_result()
652 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, in safexcel_handle_req_result()
660 (sreq->direction == SAFEXCEL_ENCRYPT)) { in safexcel_handle_req_result()
662 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, in safexcel_handle_req_result()
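
safexcel_handle_req_result() (lines 612-662) consumes the sreq->rdescs result descriptors recorded at send time, unmaps the scatterlists, and, for CBC-style encryption, copies the last ciphertext block back into areq->iv so the caller sees the chained output IV. A sketch of the completion side, assuming the surrounding locals (priv, src, dst, areq, skcipher, ctx) from the real function:

    unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

    /* Unmap with the same directions used when mapping. */
    if (src == dst) {
        dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
    } else {
        dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
        dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
    }

    /* CBC encrypt: the chained output IV is the last ciphertext block. */
    if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC &&
        sreq->direction == SAFEXCEL_ENCRYPT)
        sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, ivsize,
                           areq->cryptlen - ivsize);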
674 struct safexcel_cipher_req *sreq, in safexcel_send_req() argument
696 sreq->nr_src = sg_nents_for_len(src, totlen_src); in safexcel_send_req()
703 if (sreq->direction == SAFEXCEL_DECRYPT) in safexcel_send_req()
715 (sreq->direction == SAFEXCEL_DECRYPT)) { in safexcel_send_req()
721 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, in safexcel_send_req()
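
The decrypt path has to capture its output IV before the engine runs (lines 703-721): for CBC decryption the chained IV is the last ciphertext block of the input, which an in-place operation would overwrite with plaintext. A sketch, again assuming CBC semantics and the mainline ctx->mode field:

    /* CBC decrypt: save the final ciphertext block as the output IV
     * now; the engine already holds the input IV in its context record.
     */
    if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC &&
        sreq->direction == SAFEXCEL_DECRYPT)
        sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, ivsize,
                           totlen_src - ivsize);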
727 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst); in safexcel_send_req()
737 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst); in safexcel_send_req()
738 sreq->nr_dst = sreq->nr_src; in safexcel_send_req()
740 (sreq->nr_src <= 0))) { in safexcel_send_req()
745 if (sreq->nr_src > 0 && in safexcel_send_req()
746 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL)) in safexcel_send_req()
749 if (unlikely(totlen_src && (sreq->nr_src <= 0))) { in safexcel_send_req()
755 if (sreq->nr_src > 0 && in safexcel_send_req()
756 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE)) in safexcel_send_req()
759 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) { in safexcel_send_req()
766 if (sreq->nr_dst > 0 && in safexcel_send_req()
767 !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) { in safexcel_send_req()
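
Lines 727-767 are the mapping policy: in-place requests (src == dst) force both entry counts to the larger of the two and map the list once, bidirectionally; out-of-place requests get separate unidirectional mappings, with the src mapping undone if the dst mapping fails. Condensed sketch (the -EIO returns match the mainline convention):

    if (src == dst) {
        /* In-place: one bidirectional mapping serves both roles. */
        sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
        sreq->nr_dst = sreq->nr_src;
        if (sreq->nr_src > 0 &&
            !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
            return -EIO;
    } else {
        if (sreq->nr_src > 0 &&
            !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
            return -EIO;
        if (sreq->nr_dst > 0 &&
            !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
            dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
            return -EIO;
        }
    }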
795 for_each_sg(src, sg, sreq->nr_src, i) { in safexcel_send_req()
822 safexcel_context_control(ctx, base, sreq, first_cdesc); in safexcel_send_req()
825 sreq->direction, cryptlen, in safexcel_send_req()
832 for_each_sg(dst, sg, sreq->nr_dst, i) { in safexcel_send_req()
833 bool last = (i == sreq->nr_dst - 1); in safexcel_send_req()
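
With the lists mapped, lines 795-833 walk them to build the engine's descriptor chains: one command descriptor per source segment, one result descriptor per destination segment, with the last result descriptor flagged to close the chain. Sketch of the result-side loop, assuming the mainline safexcel_add_rdesc() helper:

    /* One result descriptor per mapped destination segment. */
    for_each_sg(dst, sg, sreq->nr_dst, i) {
        bool first = (i == 0);
        bool last = (i == sreq->nr_dst - 1);

        rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                   sg_dma_address(sg), sg_dma_len(sg));
        if (IS_ERR(rdesc)) {
            ret = PTR_ERR(rdesc);
            goto rdesc_rollback;  /* label name per mainline */
        }
        n_rdesc++;
    }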
902 if (sreq->nr_src > 0) in safexcel_send_req()
903 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
906 if (sreq->nr_src > 0) in safexcel_send_req()
907 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
909 if (sreq->nr_dst > 0) in safexcel_send_req()
910 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, in safexcel_send_req()
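
The unwind at lines 902-910 mirrors the mapping above: if descriptor setup fails, the already-queued command descriptors are rolled back and the scatterlists are unmapped with the directions they were mapped with. Sketch, assuming the mainline safexcel_ring_rollback_wptr() helper:

    cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
            safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

        if (src == dst) {
            dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
        } else {
            dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
            dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
        }
        return ret;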
920 struct safexcel_cipher_req *sreq, in safexcel_handle_inv_result() argument
929 if (unlikely(!sreq->rdescs)) in safexcel_handle_inv_result()
932 while (sreq->rdescs--) { in safexcel_handle_inv_result()
982 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_handle_result() local
985 if (sreq->needs_inv) { in safexcel_skcipher_handle_result()
986 sreq->needs_inv = false; in safexcel_skcipher_handle_result()
987 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_skcipher_handle_result()
991 req->dst, req->cryptlen, sreq, in safexcel_skcipher_handle_result()
1005 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_handle_result() local
1008 if (sreq->needs_inv) { in safexcel_aead_handle_result()
1009 sreq->needs_inv = false; in safexcel_aead_handle_result()
1010 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_aead_handle_result()
1016 sreq, should_complete, ret); in safexcel_aead_handle_result()
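
Both completion handlers (lines 982-1016) share one shape: a request flagged needs_inv answers a context invalidation, everything else is a normal cipher result. Sketch of the skcipher variant, which the listing shows almost verbatim:

    struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
    int err;

    if (sreq->needs_inv) {
        /* This completion answers an invalidation, not a cipher op. */
        sreq->needs_inv = false;
        err = safexcel_handle_inv_result(priv, ring, async, sreq,
                                         should_complete, ret);
    } else {
        err = safexcel_handle_req_result(priv, ring, async, req->src,
                                         req->dst, req->cryptlen, sreq,
                                         should_complete, ret);
    }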
1044 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_send() local
1048 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_skcipher_send()
1050 if (sreq->needs_inv) { in safexcel_skcipher_send()
1062 ret = safexcel_send_req(async, ring, sreq, req->src, in safexcel_skcipher_send()
1067 sreq->rdescs = *results; in safexcel_skcipher_send()
1077 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_send() local
1081 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_aead_send()
1083 if (sreq->needs_inv) in safexcel_aead_send()
1086 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst, in safexcel_aead_send()
1090 sreq->rdescs = *results; in safexcel_aead_send()
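
The send paths (lines 1044-1090) dispatch the same way, and cache the number of result descriptors the engine will raise in sreq->rdescs for the completion side. The BUG_ON records the invariant that needs_inv can only be set on engines with a transform record cache (EIP197_TRC_CACHE). Skcipher sketch (safexcel_send_req()'s argument list abbreviated per mainline):

    /* needs_inv is only meaningful on TRC-equipped engines. */
    BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

    if (sreq->needs_inv)
        ret = safexcel_cipher_send_inv(async, ring, commands, results);
    else
        ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
                                req->cryptlen, 0, 0, req->iv,
                                commands, results);

    sreq->rdescs = *results;
    return ret;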
1096 struct safexcel_cipher_req *sreq, in safexcel_cipher_exit_inv() argument
1106 sreq->needs_inv = true; in safexcel_cipher_exit_inv()
1130 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_exit_inv() local
1139 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_skcipher_exit_inv()
1145 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_exit_inv() local
1154 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_aead_exit_inv()
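
The exit_inv helpers (lines 1096-1154) are the teardown path: when a transform is freed while its context record may still sit in the engine's record cache, a dummy request is marked needs_inv, queued to the ring, and the caller sleeps until the invalidation completes. Condensed sketch; the completion plumbing (result->completion, ctx->base.exit_inv, the ring worker) is assumed from the mainline driver:

    init_completion(&result->completion);

    sreq->needs_inv = true;
    ctx->base.exit_inv = true;

    /* Queue the dummy request and kick the ring worker. */
    spin_lock_bh(&priv->ring[ring].queue_lock);
    crypto_enqueue_request(&priv->ring[ring].queue, base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    queue_work(priv->ring[ring].workqueue,
               &priv->ring[ring].work_data.work);

    wait_for_completion(&result->completion);
    return result->error;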
1158 struct safexcel_cipher_req *sreq, in safexcel_queue_req() argument
1165 sreq->needs_inv = false; in safexcel_queue_req()
1166 sreq->direction = dir; in safexcel_queue_req()
1170 sreq->needs_inv = true; in safexcel_queue_req()
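
safexcel_queue_req() (lines 1158-1170) is the common entry point: it stamps the request with its direction, defaults needs_inv to false, and flips it on only when the transform already owns a context record that the TRC may be caching stale. Sketch of the decision, assuming the mainline context-record fields (ctx->base.ctxr, ctx->base.needs_inv):

    sreq->needs_inv = false;
    sreq->direction = dir;

    if (ctx->base.ctxr) {
        if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
            /* Key material changed under a cached record:
             * invalidate before reusing the context.
             */
            sreq->needs_inv = true;
            ctx->base.needs_inv = false;
        }
    } else {
        /* First use: allocate a context record from the DMA pool. */
        ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                         EIP197_GFP_FLAGS(*base),
                                         &ctx->base.ctxr_dma);
        if (!ctx->base.ctxr)
            return -ENOMEM;
    }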