
Searched refs:src_nents (Results 1 – 25 of 29, page 1 of 2), sorted by relevance


/linux-5.19.10/drivers/crypto/caam/
caamhash.c
535 int src_nents; member
548 if (edesc->src_nents) in ahash_unmap()
549 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
827 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
849 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
850 if (src_nents < 0) { in ahash_update_ctx()
852 return src_nents; in ahash_update_ctx()
855 if (src_nents) { in ahash_update_ctx()
856 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
877 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
[all …]
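
The ahash_update_ctx() hits above show the pattern that recurs throughout these results: sg_nents_for_len() counts the entries covering the request length (a negative return means the list is shorter than that length), dma_map_sg() maps that many, and the error path unmaps with the same count. A minimal sketch of the pattern, with illustrative names rather than the driver's own:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Returns the mapped entry count (0 for an empty range) or -errno. */
static int map_src_sketch(struct device *dev, struct scatterlist *src,
			  unsigned int src_len)
{
	int src_nents, mapped_nents;

	src_nents = sg_nents_for_len(src, src_len);
	if (src_nents < 0)
		return src_nents;	/* list shorter than src_len */

	if (!src_nents)
		return 0;		/* nothing to map */

	mapped_nents = dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE);
	if (!mapped_nents)
		return -ENOMEM;		/* DMA mapping failed */

	/*
	 * Keep src_nents around: dma_unmap_sg() must be called with the
	 * count passed to dma_map_sg(), not the count it returned.
	 */
	return mapped_nents;
}
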
caamalg_qi2.c
149 struct scatterlist *dst, int src_nents, in caam_unmap() argument
155 if (src_nents) in caam_unmap()
156 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
160 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
362 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
383 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
384 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
388 return ERR_PTR(src_nents); in aead_edesc_alloc()
399 if (src_nents) { in aead_edesc_alloc()
400 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
[all …]
caamalg_qi2.h
111 int src_nents; member
131 int src_nents; member
148 int src_nents; member
caamalg_qi.c
798 int src_nents; member
820 int src_nents; member
868 struct scatterlist *dst, int src_nents, in caam_unmap() argument
874 if (src_nents) in caam_unmap()
875 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
879 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
895 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
907 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
946 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
972 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
[all …]
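
The caam_unmap() hits in caamalg_qi2.c and caamalg_qi.c (and in caamalg.c below) share one shape: an in-place request (dst == src) was mapped once bidirectionally, an out-of-place request maps each side with its own direction, and a zero count marks a side that was never mapped. A condensed sketch of that helper (IV and descriptor-table unmapping elided):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static void caam_unmap_sketch(struct device *dev, struct scatterlist *src,
			      struct scatterlist *dst,
			      int src_nents, int dst_nents)
{
	if (dst != src) {
		/* Out-of-place: the device only read src, only wrote dst. */
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		/* In-place: one list, both read and written by the device. */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
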
caamalg.c
888 int src_nents; member
914 int src_nents; member
927 struct scatterlist *dst, int src_nents, in caam_unmap() argument
933 if (src_nents) in caam_unmap()
934 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
938 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
953 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
964 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1249 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1253 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
[all …]
caampkc.h
135 int src_nents; member
caampkc.c
49 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap()
254 int src_nents, dst_nents; in rsa_edesc_alloc() local
282 src_nents = sg_nents_for_len(req_ctx->fixup_src, in rsa_edesc_alloc()
286 mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents, in rsa_edesc_alloc()
332 edesc->src_nents = src_nents; in rsa_edesc_alloc()
363 dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE); in rsa_edesc_alloc()
/linux-5.19.10/drivers/crypto/qce/
aead.c
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_done()
239 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1; in qce_aead_ccm_prepare_buf_assoclen()
241 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2; in qce_aead_ccm_prepare_buf_assoclen()
244 ret = sg_alloc_table(&rctx->src_tbl, rctx->src_nents, gfp); in qce_aead_ccm_prepare_buf_assoclen()
279 rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen); in qce_aead_ccm_prepare_buf_assoclen()
289 rctx->dst_nents = rctx->src_nents + 1; in qce_aead_ccm_prepare_buf_assoclen()
291 rctx->dst_nents = rctx->src_nents; in qce_aead_ccm_prepare_buf_assoclen()
316 rctx->src_nents = sg_nents_for_len(req->src, totallen); in qce_aead_prepare_buf()
317 if (rctx->src_nents < 0) { in qce_aead_prepare_buf()
323 rctx->src_nents = rctx->dst_nents - 1; in qce_aead_prepare_buf()
[all …]
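
In qce_aead_ccm_prepare_buf_assoclen() the table is sized one or two entries beyond what sg_nents_for_len() reports, leaving slots for driver-owned pieces such as the CCM associated-data buffer (and on the destination side one extra entry, dst_nents = src_nents + 1, apparently for the appended tag). A sketch of the allocation step; the meaning of the extra entries is driver-specific, so the count is left as a parameter:

#include <linux/scatterlist.h>

static int alloc_padded_table(struct sg_table *tbl, struct scatterlist *src,
			      unsigned int totallen, unsigned int extra,
			      gfp_t gfp)
{
	int nents = sg_nents_for_len(src, totallen);

	if (nents < 0)
		return nents;	/* source shorter than totallen */

	/* Room for 'extra' driver-owned entries besides the payload. */
	return sg_alloc_table(tbl, nents + extra, gfp);
}
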
skcipher.c
50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
75 int dst_nents, src_nents, ret; in qce_skcipher_async_req_handle() local
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
89 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
90 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
92 return rctx->src_nents; in qce_skcipher_async_req_handle()
133 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
134 if (src_nents < 0) { in qce_skcipher_async_req_handle()
135 ret = src_nents; in qce_skcipher_async_req_handle()
141 src_nents = dst_nents - 1; in qce_skcipher_async_req_handle()
[all …]
sha.c
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
93 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
94 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
96 return rctx->src_nents; in qce_ahash_async_req_handle()
99 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
109 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
127 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
cipher.h
38 int src_nents; member
sha.h
50 int src_nents; member
aead.h
30 int src_nents; member
/linux-5.19.10/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c
335 int src_nents, dst_nents; in __virtio_crypto_skcipher_do_req() local
345 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
346 if (src_nents < 0) { in __virtio_crypto_skcipher_do_req()
348 return src_nents; in __virtio_crypto_skcipher_do_req()
354 src_nents, dst_nents); in __virtio_crypto_skcipher_do_req()
357 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_skcipher_do_req()
437 for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--) in __virtio_crypto_skcipher_do_req()
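The last hit shows the consuming side: once src_nents is known, the driver walks exactly that many entries with sg_next() while gathering buffers for its virtqueue (sg_total above also budgets three slots for the request's fixed elements). In sketch form, with a hypothetical per-entry callback:

#include <linux/scatterlist.h>

/* 'visit' is a hypothetical callback standing in for the gather step. */
static void walk_src_sketch(struct scatterlist *src, int src_nents,
			    void (*visit)(struct scatterlist *sg))
{
	struct scatterlist *sg;

	for (sg = src; src_nents; sg = sg_next(sg), src_nents--)
		visit(sg);
}
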
/linux-5.19.10/drivers/crypto/marvell/cesa/
cipher.c
66 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
69 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
97 len = mv_cesa_sg_copy_to_sram(engine, req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
327 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
339 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
400 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
433 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
434 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
436 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c
105 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
204 engine, req->src, creq->src_nents, in mv_cesa_ahash_std_step()
427 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
481 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
646 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
647 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
753 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
765 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
766 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
768 return creq->src_nents; in mv_cesa_ahash_req_init()
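hash.c also linearizes data with sg_pcopy_to_buffer(), which takes the same (list, nents) pair plus a byte offset to skip and returns how many bytes it copied. A thin sketch of the caching step, with illustrative names:

#include <linux/scatterlist.h>

/* Copy 'len' bytes starting at byte 'offset' of the request into a
 * linear buffer; returns the number of bytes actually copied. */
static size_t cache_tail_sketch(struct scatterlist *src, int src_nents,
				void *cache, size_t len, off_t offset)
{
	return sg_pcopy_to_buffer(src, src_nents, cache, len, offset);
}
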
cesa.h
573 int src_nents; member
619 int src_nents; member
/linux-5.19.10/drivers/crypto/
talitos.c
967 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
977 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
978 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
982 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
983 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1220 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1332 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len; in talitos_edesc_alloc() local
1347 src_nents = sg_nents_for_len(src, src_len); in talitos_edesc_alloc()
1348 if (src_nents < 0) { in talitos_edesc_alloc()
1352 src_nents = (src_nents == 1) ? 0 : src_nents; in talitos_edesc_alloc()
[all …]
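
talitos adds a wrinkle: talitos_edesc_alloc() stores 0 when the source is a single segment (line 1352 above), meaning no link table is needed, and talitos_sg_unmap() recovers a usable count with the GNU ?: operator (line 967). In sketch form, assuming the negative-error case was already handled:

/* A stored count of 0 means "single segment, no link table"; the GNU
 * "a ?: b" operator (a if non-zero, else b) restores a count of 1. */
static unsigned int talitos_count_sketch(int counted)
{
	int stored = (counted == 1) ? 0 : counted; /* talitos_edesc_alloc() */

	return stored ?: 1;                        /* talitos_sg_unmap() */
}
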
talitos.h
61 int src_nents; member
sa2ul.c
1088 int sg_nents, src_nents, dst_nents; in sa_run() local
1159 src_nents = 1; in sa_run()
1165 mapped_sg->sgt.orig_nents = src_nents; in sa_run()
1187 &split_size, &src, &src_nents, gfp_flags); in sa_run()
1189 src_nents = mapped_sg->sgt.nents; in sa_run()
1199 dst_nents = src_nents; in sa_run()
1261 src_nents, DMA_MEM_TO_DEV, in sa_run()
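The argument shape at line 1187 (&split_size, &src, &src_nents, gfp_flags) matches sg_split() from lib/scatterlist, which carves an already-mapped list into pieces and reports each piece's entry count. A hedged sketch of a single front split, under that assumption:

#include <linux/scatterlist.h>

/* Carve the first 'split_size' bytes of a mapped list into a newly
 * allocated scatterlist; sg_split() allocates *out and fills *out_nents. */
static int carve_front_sketch(struct scatterlist *in, int in_nents,
			      size_t split_size, struct scatterlist **out,
			      int *out_nents, gfp_t gfp)
{
	/* One split, starting at offset 0. */
	return sg_split(in, in_nents, 0, 1, &split_size, out, out_nents, gfp);
}
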
/linux-5.19.10/drivers/crypto/keembay/
keembay-ocs-aes-core.c
88 int src_nents; member
385 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in kmb_ocs_sk_dma_cleanup()
457 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in kmb_ocs_sk_prepare_notinplace()
458 if (rctx->src_nents < 0) in kmb_ocs_sk_prepare_notinplace()
463 rctx->src_nents, DMA_TO_DEVICE); in kmb_ocs_sk_prepare_notinplace()
692 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in kmb_ocs_aead_dma_cleanup()
733 rctx->src_nents = sg_nents_for_len(req->src, in kmb_ocs_aead_dma_prepare()
735 if (rctx->src_nents < 0) in kmb_ocs_aead_dma_prepare()
761 sg_pcopy_to_buffer(req->src, rctx->src_nents, rctx->in_tag, in kmb_ocs_aead_dma_prepare()
855 rctx->src_nents, DMA_TO_DEVICE); in kmb_ocs_aead_dma_prepare()
[all …]
/linux-5.19.10/drivers/crypto/ccp/
ccp-dmaengine.c
356 unsigned int src_nents, in ccp_create_desc() argument
375 if (!dst_nents || !src_nents) in ccp_create_desc()
392 src_nents--; in ccp_create_desc()
393 if (!src_nents) in ccp_create_desc()
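ccp_create_desc() consumes both lists in lockstep: it rejects an empty list up front (line 375) and decrements src_nents as each source entry drains, stopping at zero (lines 392 and 393). A skeleton of that kind of walk, with the hardware-descriptor emission elided:

#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/scatterlist.h>

static int dual_walk_sketch(struct scatterlist *src, unsigned int src_nents,
			    struct scatterlist *dst, unsigned int dst_nents)
{
	unsigned int src_len, dst_len;

	if (!dst_nents || !src_nents)
		return -EINVAL;

	src_len = sg_dma_len(src);
	dst_len = sg_dma_len(dst);

	for (;;) {
		unsigned int len = min(src_len, dst_len);

		/* ... emit one descriptor covering 'len' bytes ... */

		src_len -= len;
		if (!src_len) {
			if (!--src_nents)
				break;		/* source fully consumed */
			src = sg_next(src);
			src_len = sg_dma_len(src);
		}

		dst_len -= len;
		if (!dst_len) {
			if (!--dst_nents)
				return -EINVAL;	/* destination ran out early */
			dst = sg_next(dst);
			dst_len = sg_dma_len(dst);
		}
	}

	return 0;
}
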
/linux-5.19.10/drivers/crypto/rockchip/
rk3288_crypto.h
213 size_t src_nents; member
/linux-5.19.10/include/linux/
dmaengine.h
893 struct scatterlist *src_sg, unsigned int src_nents,
1066 struct scatterlist *src_sg, unsigned int src_nents, in dmaengine_prep_dma_memcpy_sg() argument
1073 src_sg, src_nents, in dmaengine_prep_dma_memcpy_sg()
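The inline wrapper at line 1066 forwards to the channel's prep hook. A hedged usage sketch, assuming a channel that advertises the capability, lists that are already DMA-mapped, and the argument order of the 5.19 header (destination before source):

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int submit_memcpy_sg(struct dma_chan *chan,
			    struct scatterlist *dst_sg, unsigned int dst_nents,
			    struct scatterlist *src_sg, unsigned int src_nents)
{
	struct dma_async_tx_descriptor *tx;

	tx = dmaengine_prep_dma_memcpy_sg(chan, dst_sg, dst_nents,
					  src_sg, src_nents,
					  DMA_PREP_INTERRUPT);
	if (!tx)
		return -EIO;

	dmaengine_submit(tx);		/* queue the descriptor */
	dma_async_issue_pending(chan);	/* start the transfer */
	return 0;
}
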
/linux-5.19.10/drivers/crypto/bcm/
cipher.c
226 rctx->src_nents, chunksize); in spu_skcipher_tx_sg_create()
348 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_skcipher_req()
446 tx_frag_num += rctx->src_nents; in handle_skcipher_req()
607 rctx->src_nents, new_data_len); in spu_ahash_tx_sg_create()
769 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, in handle_ahash_req()
872 tx_frag_num += rctx->src_nents; in handle_ahash_req()
1218 rctx->src_nents, datalen); in spu_aead_tx_sg_create()
1344 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_aead_req()
1503 tx_frag_num += rctx->src_nents; in handle_aead_req()
1735 rctx->src_nents = 0; in skcipher_enqueue()
[all …]
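
Unlike the drivers above, bcm counts entries with its own spu_sg_count(), which starts src_skip bytes into the list and covers only one chunk; sg_nents_for_len() cannot express the skip. A generic sketch of a skip-aware count (not the driver's actual helper, just the shape such a count takes):

#include <linux/errno.h>
#include <linux/minmax.h>
#include <linux/scatterlist.h>

static int sg_count_from_sketch(struct scatterlist *sg, unsigned int skip,
				unsigned int len)
{
	int nents = 0;

	/* Drop entries wholly consumed by the skip. */
	while (sg && skip >= sg->length) {
		skip -= sg->length;
		sg = sg_next(sg);
	}

	for (; sg && len; sg = sg_next(sg), nents++) {
		len -= min(len, sg->length - skip);
		skip = 0;	/* only the first counted entry is partial */
	}

	return len ? -EINVAL : nents;
}
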
