/linux-5.19.10/drivers/crypto/amlogic/

amlogic-gxl-cipher.c
    28  struct scatterlist *dst_sg = areq->dst;    in meson_cipher_need_fallback() local
    33  if (sg_nents(src_sg) != sg_nents(dst_sg))    in meson_cipher_need_fallback()
    37  if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3)    in meson_cipher_need_fallback()
    40  while (src_sg && dst_sg) {    in meson_cipher_need_fallback()
    43  if ((dst_sg->length % 16) != 0)    in meson_cipher_need_fallback()
    45  if (src_sg->length != dst_sg->length)    in meson_cipher_need_fallback()
    49  if (!IS_ALIGNED(dst_sg->offset, sizeof(u32)))    in meson_cipher_need_fallback()
    52  dst_sg = sg_next(dst_sg);    in meson_cipher_need_fallback()
    95  struct scatterlist *dst_sg = areq->dst;    in meson_cipher() local
   204  dst_sg = areq->dst;    in meson_cipher()
   [all …]
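The amlogic-gxl hits above come from the driver's fallback test: it walks the source and destination scatterlists in lockstep and falls back to a software implementation when the lists differ in entry count, exceed the hardware descriptor budget, or contain segments that are not whole 16-byte blocks or not 32-bit aligned. A minimal sketch of that style of check follows; MAX_DESC and the function name are illustrative, not the driver's own.

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

#define MAX_DESC 64	/* illustrative hardware descriptor limit */

static bool sg_lists_hw_compatible(struct scatterlist *src_sg,
				   struct scatterlist *dst_sg)
{
	if (sg_nents(src_sg) != sg_nents(dst_sg))
		return false;
	if (sg_nents(src_sg) > MAX_DESC - 3 || sg_nents(dst_sg) > MAX_DESC - 3)
		return false;

	while (src_sg && dst_sg) {
		/* Hardware handles only whole 16-byte blocks of equal length. */
		if ((src_sg->length % 16) || (dst_sg->length % 16))
			return false;
		if (src_sg->length != dst_sg->length)
			return false;
		/* Descriptors require 32-bit aligned buffer offsets. */
		if (!IS_ALIGNED(src_sg->offset, sizeof(u32)) ||
		    !IS_ALIGNED(dst_sg->offset, sizeof(u32)))
			return false;
		src_sg = sg_next(src_sg);
		dst_sg = sg_next(dst_sg);
	}
	return true;
}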
/linux-5.19.10/drivers/crypto/ccp/ |
ccp-dmaengine.c
   353  struct scatterlist *dst_sg,    in ccp_create_desc() argument
   372  if (!dst_sg || !src_sg)    in ccp_create_desc()
   387  dst_len = sg_dma_len(dst_sg);    in ccp_create_desc()
   410  dst_sg = sg_next(dst_sg);    in ccp_create_desc()
   411  if (!dst_sg)    in ccp_create_desc()
   414  dst_len = sg_dma_len(dst_sg);    in ccp_create_desc()
   434  ccp_pt->dst_dma = sg_dma_address(dst_sg) + dst_offset;    in ccp_create_desc()
   485  struct scatterlist dst_sg, src_sg;    in ccp_prep_dma_memcpy() local
   491  sg_init_table(&dst_sg, 1);    in ccp_prep_dma_memcpy()
   492  sg_dma_address(&dst_sg) = dst;    in ccp_prep_dma_memcpy()
   [all …]
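ccp_prep_dma_memcpy() shows a common dmaengine pattern: a plain memcpy request is wrapped in two single-entry scatterlists so the same descriptor-building helper (ccp_create_desc() here) can serve both the memcpy and the scatter-gather paths; nbpfaxi.c and ste_dma40.c further down do the same. A hedged sketch of the wrapping step, with create_desc() as a hypothetical stand-in for the driver-specific builder:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

/* Hypothetical driver-specific descriptor builder (stand-in for ccp_create_desc()). */
struct dma_async_tx_descriptor *create_desc(struct dma_chan *chan,
					    struct scatterlist *dst_sg, unsigned int dst_nents,
					    struct scatterlist *src_sg, unsigned int src_nents,
					    unsigned long flags);

static struct dma_async_tx_descriptor *
example_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		    size_t len, unsigned long flags)
{
	struct scatterlist dst_sg, src_sg;

	sg_init_table(&dst_sg, 1);
	sg_init_table(&src_sg, 1);

	/* The caller passes bus addresses, so fill the DMA fields directly. */
	sg_dma_address(&dst_sg) = dst;
	sg_dma_len(&dst_sg) = len;
	sg_dma_address(&src_sg) = src;
	sg_dma_len(&src_sg) = len;

	/*
	 * The lists live on the stack, so the builder must consume them
	 * before returning, as ccp_create_desc() does.
	 */
	return create_desc(chan, &dst_sg, 1, &src_sg, 1, flags);
}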
/linux-5.19.10/drivers/mailbox/ |
bcm-flexrm-mailbox.c
   606  struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst;    in flexrm_spu_estimate_nonheader_desc_count() local
   608  while (src_sg || dst_sg) {    in flexrm_spu_estimate_nonheader_desc_count()
   616  while (dst_target && dst_sg) {    in flexrm_spu_estimate_nonheader_desc_count()
   618  if (dst_sg->length < dst_target)    in flexrm_spu_estimate_nonheader_desc_count()
   619  dst_target -= dst_sg->length;    in flexrm_spu_estimate_nonheader_desc_count()
   622  dst_sg = sg_next(dst_sg);    in flexrm_spu_estimate_nonheader_desc_count()
   665  struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst;    in flexrm_spu_write_descs() local
   667  while (src_sg || dst_sg) {    in flexrm_spu_write_descs()
   684  while (dst_target && dst_sg) {    in flexrm_spu_write_descs()
   685  if (sg_dma_len(dst_sg) & 0xf)    in flexrm_spu_write_descs()
   [all …]
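The inner dst_target loop in flexrm_spu_estimate_nonheader_desc_count() works out how many destination segments are needed to cover a target length, so the driver can size its descriptor ring before writing the descriptors. A generic sketch of that counting step (the function name is illustrative):

#include <linux/scatterlist.h>

/* Count how many scatterlist segments are needed to cover 'target' bytes. */
static unsigned int example_sg_count_for_len(struct scatterlist *sg,
					     unsigned int target)
{
	unsigned int cnt = 0;

	while (target && sg) {
		cnt++;
		if (sg->length < target)
			target -= sg->length;
		else
			target = 0;
		sg = sg_next(sg);
	}
	return cnt;
}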
bcm-pdc-mailbox.c
   274  struct scatterlist *dst_sg;    member
   627  dma_unmap_sg(dev, rx_ctx->dst_sg, sg_nents(rx_ctx->dst_sg),    in pdc_receive_one()
   814  static int pdc_rx_list_init(struct pdc_state *pdcs, struct scatterlist *dst_sg,    in pdc_rx_list_init() argument
   853  rx_ctx->dst_sg = dst_sg;    in pdc_rx_list_init()
/linux-5.19.10/drivers/crypto/qce/ |
skcipher.c
    51  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_skcipher_done()
   124  rctx->dst_sg = rctx->dst_tbl.sgl;    in qce_skcipher_async_req_handle()
   126  dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_skcipher_async_req_handle()
   140  rctx->src_sg = rctx->dst_sg;    in qce_skcipher_async_req_handle()
   145  rctx->dst_sg, dst_nents,    in qce_skcipher_async_req_handle()
   164  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_skcipher_async_req_handle()
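The qce hits pair dma_map_sg() in the request handler with dma_unmap_sg() in both the error path and the completion callback (qce_skcipher_done()). A minimal sketch of that pairing, with issue_transfer() as a hypothetical placeholder; note the unmap uses the original nents, not the (possibly smaller) count returned by dma_map_sg():

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Hypothetical helper that hands the mapped list to the engine. */
int issue_transfer(struct scatterlist *dst_sg, int mapped_nents);

static int example_submit(struct device *dev, struct scatterlist *dst_sg,
			  int dst_nents)
{
	int mapped, ret;

	mapped = dma_map_sg(dev, dst_sg, dst_nents, DMA_FROM_DEVICE);
	if (!mapped)
		return -EIO;

	ret = issue_transfer(dst_sg, mapped);
	if (ret)
		/* Error path unmaps here; on success the completion handler does it. */
		dma_unmap_sg(dev, dst_sg, dst_nents, DMA_FROM_DEVICE);

	return ret;
}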
aead.c
    51  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_aead_done()
   174  rctx->dst_sg = rctx->dst_tbl.sgl;    in qce_aead_prepare_dst_buf()
   175  rctx->dst_nents = sg_nents_for_len(rctx->dst_sg, totallen) + 1;    in qce_aead_prepare_dst_buf()
   292  rctx->dst_sg = rctx->src_sg;    in qce_aead_ccm_prepare_buf_assoclen()
   324  rctx->src_sg = rctx->dst_sg;    in qce_aead_prepare_buf()
   355  rctx->dst_sg = rctx->src_sg;    in qce_aead_ccm_prepare_buf()
   452  dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_aead_async_req_handle()
   471  ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents, rctx->dst_sg, dst_nents,    in qce_aead_async_req_handle()
   490  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);    in qce_aead_async_req_handle()
cipher.h
    42  struct scatterlist *dst_sg;    member
aead.h
    36  struct scatterlist *dst_sg;    member
/linux-5.19.10/fs/crypto/ |
keysetup_v1.c
    54  struct scatterlist src_sg, dst_sg;    in derive_key_aes() local
    76  sg_init_one(&dst_sg, derived_key, derived_keysize);    in derive_key_aes()
    77  skcipher_request_set_crypt(req, &src_sg, &dst_sg, derived_keysize,    in derive_key_aes()
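The derive_key_aes() hits above, and fname_decrypt() in the next entry, use the simplest scatterlist form: sg_init_one() wraps one contiguous buffer, and skcipher_request_set_crypt() attaches the source and destination lists to the request. A self-contained sketch of that pattern for a one-shot synchronous operation, assuming an "ecb(aes)" transform and buffers that sg_init_one() can address (i.e. not on the stack); this is an illustration, not fs/crypto's exact code:

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/types.h>

static int example_encrypt_buf(const u8 *key, unsigned int keylen,
			       const u8 *in, u8 *out, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist src_sg, dst_sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/* One-entry scatterlists over the contiguous input and output buffers. */
	sg_init_one(&src_sg, in, len);
	sg_init_one(&dst_sg, out, len);
	skcipher_request_set_crypt(req, &src_sg, &dst_sg, len, NULL);

	/* Wait synchronously, even if the driver completes asynchronously. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}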
fname.c
   157  struct scatterlist src_sg, dst_sg;    in fname_decrypt() local
   176  sg_init_one(&dst_sg, oname->name, oname->len);    in fname_decrypt()
   177  skcipher_request_set_crypt(req, &src_sg, &dst_sg, iname->len, &iv);    in fname_decrypt()
/linux-5.19.10/drivers/dma/ |
nbpfaxi.c
   921  struct scatterlist *src_sg, struct scatterlist *dst_sg,    in nbpf_prep_sg() argument
   934  mem_sg = dst_sg;    in nbpf_prep_sg()
   967  sg_dma_address(dst_sg),    in nbpf_prep_sg()
   978  dst_sg = sg_next(dst_sg);    in nbpf_prep_sg()
   979  mem_sg = direction == DMA_DEV_TO_MEM ? dst_sg : src_sg;    in nbpf_prep_sg()
   994  struct scatterlist dst_sg;    in nbpf_prep_memcpy() local
   997  sg_init_table(&dst_sg, 1);    in nbpf_prep_memcpy()
  1000  sg_dma_address(&dst_sg) = dst;    in nbpf_prep_memcpy()
  1003  sg_dma_len(&dst_sg) = len;    in nbpf_prep_memcpy()
  1009  return nbpf_prep_sg(chan, &src_sg, &dst_sg, 1,    in nbpf_prep_memcpy()
ste_dma40.c
  2480  struct scatterlist dst_sg;    in d40_prep_memcpy() local
  2483  sg_init_table(&dst_sg, 1);    in d40_prep_memcpy()
  2486  sg_dma_address(&dst_sg) = dst;    in d40_prep_memcpy()
  2489  sg_dma_len(&dst_sg) = size;    in d40_prep_memcpy()
  2492  return d40_prep_sg(chan, &src_sg, &dst_sg, 1,    in d40_prep_memcpy()
/linux-5.19.10/fs/ecryptfs/ |
keystore.c
   598  struct scatterlist dst_sg[2];    member
   802  s->dst_sg, 2);    in ecryptfs_write_tag_70_packet()
   828  skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg,    in ecryptfs_write_tag_70_packet()
   868  struct scatterlist dst_sg[2];    member
   997  s->block_aligned_filename_size, s->dst_sg, 2);    in ecryptfs_parse_tag_70_packet()
  1044  skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg,    in ecryptfs_parse_tag_70_packet()
  1649  struct scatterlist dst_sg[2];    in decrypt_passphrase_encrypted_session_key() local
  1687  dst_sg, 2);    in decrypt_passphrase_encrypted_session_key()
  1716  skcipher_request_set_crypt(req, src_sg, dst_sg,    in decrypt_passphrase_encrypted_session_key()
  2182  struct scatterlist dst_sg[2];    in write_tag_3_packet() local
  [all …]
crypto.c
   291  struct scatterlist *dst_sg,    in crypt_scatterlist() argument
   334  skcipher_request_set_crypt(req, src_sg, dst_sg, size, iv);    in crypt_scatterlist()
   382  struct scatterlist src_sg, dst_sg;    in crypt_extent() local
   397  sg_init_table(&dst_sg, 1);    in crypt_extent()
   401  sg_set_page(&dst_sg, dst_page, extent_size,    in crypt_extent()
   404  rc = crypt_scatterlist(crypt_stat, &dst_sg, &src_sg, extent_size,    in crypt_extent()
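crypt_extent() builds its one-entry lists with sg_set_page(): each entry points at a page plus the extent's byte offset before both lists are handed to crypt_scatterlist(). A sketch of that setup, with crypt_one_extent() as a hypothetical stand-in for a helper like ecryptfs's crypt_scatterlist():

#include <linux/scatterlist.h>

/* Hypothetical stand-in for a helper like ecryptfs's crypt_scatterlist(). */
int crypt_one_extent(struct scatterlist *dst_sg, struct scatterlist *src_sg,
		     unsigned int extent_size);

static int example_crypt_page_extent(struct page *dst_page, struct page *src_page,
				     unsigned long extent_offset,
				     unsigned int extent_size)
{
	struct scatterlist src_sg, dst_sg;

	sg_init_table(&src_sg, 1);
	sg_init_table(&dst_sg, 1);

	/* Each entry covers one extent inside its page, at a byte offset. */
	sg_set_page(&src_sg, src_page, extent_size, extent_offset * extent_size);
	sg_set_page(&dst_sg, dst_page, extent_size, extent_offset * extent_size);

	return crypt_one_extent(&dst_sg, &src_sg, extent_size);
}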
/linux-5.19.10/drivers/dma/xilinx/ |
xilinx_dma.c
  2142  struct dma_chan *dchan, struct scatterlist *dst_sg,    in xilinx_cdma_prep_memcpy_sg() argument
  2156  if (unlikely(!dst_sg || !src_sg))    in xilinx_cdma_prep_memcpy_sg()
  2166  dst_avail = sg_dma_len(dst_sg);    in xilinx_cdma_prep_memcpy_sg()
  2183  dma_dst = sg_dma_address(dst_sg) + sg_dma_len(dst_sg) -    in xilinx_cdma_prep_memcpy_sg()
  2213  dst_sg = sg_next(dst_sg);    in xilinx_cdma_prep_memcpy_sg()
  2214  if (dst_sg == NULL)    in xilinx_cdma_prep_memcpy_sg()
  2217  dst_avail = sg_dma_len(dst_sg);    in xilinx_cdma_prep_memcpy_sg()
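xilinx_cdma_prep_memcpy_sg() copies between two scatterlists that may be segmented differently: it tracks how much of the current source and destination segment remains, emits a descriptor for the smaller of the two, and advances whichever list ran out. A sketch of that loop structure (not the driver's exact code), with emit_copy_desc() standing in for the hardware descriptor setup:

#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

/* Hypothetical stand-in for the hardware descriptor setup. */
void emit_copy_desc(dma_addr_t dst, dma_addr_t src, size_t len);

static void example_sg_to_sg_copy(struct scatterlist *dst_sg,
				  struct scatterlist *src_sg)
{
	size_t dst_avail, src_avail;

	if (!dst_sg || !src_sg)
		return;

	dst_avail = sg_dma_len(dst_sg);
	src_avail = sg_dma_len(src_sg);

	for (;;) {
		/* Copy as much as both current segments still allow. */
		size_t len = min(dst_avail, src_avail);

		if (len)
			emit_copy_desc(sg_dma_address(dst_sg) +
				       sg_dma_len(dst_sg) - dst_avail,
				       sg_dma_address(src_sg) +
				       sg_dma_len(src_sg) - src_avail,
				       len);

		dst_avail -= len;
		src_avail -= len;

		/* Advance whichever list has exhausted its current segment. */
		if (!dst_avail) {
			dst_sg = sg_next(dst_sg);
			if (!dst_sg)
				break;
			dst_avail = sg_dma_len(dst_sg);
		}
		if (!src_avail) {
			src_sg = sg_next(src_sg);
			if (!src_sg)
				break;
			src_avail = sg_dma_len(src_sg);
		}
	}
}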
/linux-5.19.10/include/linux/ |
dmaengine.h
   892  struct scatterlist *dst_sg, unsigned int dst_nents,
  1065  struct scatterlist *dst_sg, unsigned int dst_nents,    in dmaengine_prep_dma_memcpy_sg() argument
  1072  return chan->device->device_prep_dma_memcpy_sg(chan, dst_sg, dst_nents,    in dmaengine_prep_dma_memcpy_sg()
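dmaengine.h wraps the optional device_prep_dma_memcpy_sg() callback in the dmaengine_prep_dma_memcpy_sg() inline shown above. A sketch of how a client might call it, assuming the channel's driver implements the callback (the Xilinx CDMA driver above is one provider in this tree) and that both lists were already DMA-mapped with dma_map_sg(); error handling is condensed:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int example_sg_memcpy(struct dma_chan *chan,
			     struct scatterlist *dst_sg, unsigned int dst_nents,
			     struct scatterlist *src_sg, unsigned int src_nents)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_dma_memcpy_sg(chan, dst_sg, dst_nents,
					    src_sg, src_nents,
					    DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	/* Kick the channel; completion is reported via the descriptor callback. */
	dma_async_issue_pending(chan);
	return 0;
}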
/linux-5.19.10/drivers/crypto/bcm/ |
cipher.h
   294  struct scatterlist *dst_sg;    member
cipher.c
   161  datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,    in spu_skcipher_rx_sg_create()
   349  rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);    in handle_skcipher_req()
  1105  datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,    in spu_aead_rx_sg_create()
  1345  rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);    in handle_aead_req()
  1737  rctx->dst_sg = req->dst;    in skcipher_enqueue()
  1924  rctx->dst_sg = NULL;    in ahash_enqueue()
  2682  rctx->dst_sg = rctx->src_sg;    in aead_enqueue()
  2690  if (spu_sg_at_offset(req->dst, req->assoclen, &rctx->dst_sg,    in aead_enqueue()
  2720  rctx->dst_sg, rctx->dst_skip);    in aead_enqueue()