/linux-5.19.10/net/tls/tls_device_fallback.c:
     56  struct scatterlist sg_in[3];  in tls_enc_record() local
     82  sg_init_table(sg_in, ARRAY_SIZE(sg_in));  in tls_enc_record()
     84  sg_set_buf(sg_in, aad, TLS_AAD_SPACE_SIZE);  in tls_enc_record()
     86  chain_to_walk(sg_in + 1, in);  in tls_enc_record()
    113  aead_request_set_crypt(aead_req, sg_in, sg_out, len, iv);  in tls_enc_record()
    141  struct crypto_aead *aead, struct scatterlist *sg_in,  in tls_enc_records() argument
    148  scatterwalk_start(&in, sg_in);  in tls_enc_records()
    228  static int fill_sg_in(struct scatterlist *sg_in,  in fill_sg_in() argument
    275  sg_set_page(sg_in + i, skb_frag_page(frag),  in fill_sg_in()
    281  sg_in[i].length += remaining;  in fill_sg_in()
    [all …]
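The tls_enc_record() hits above show the usual pattern for handing a TLS-style record to the in-kernel AEAD API: one scatterlist entry per region (AAD, payload, tag space), then a single aead_request_set_crypt() call. Below is a minimal sketch of that pattern, not the fallback code itself; the function name demo_aead_encrypt, the "gcm(aes)" choice and the error handling are assumptions for illustration, and the aad/payload/tag buffers are assumed to be kmalloc'ed (sg_set_buf() cannot describe stack memory).

/*
 * Sketch only: three-entry source scatterlist [AAD | payload | tag],
 * encrypted in place with the generic AEAD API.
 */
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/aead.h>

static int demo_aead_encrypt(const u8 *key, unsigned int keylen,
			     u8 *aad, unsigned int aad_len,
			     u8 *payload, unsigned int payload_len,
			     u8 *tag, unsigned int tag_len, u8 *iv)
{
	struct scatterlist sg[3];
	struct crypto_aead *aead;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	aead = crypto_alloc_aead("gcm(aes)", 0, 0);	/* assumed algorithm */
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	err = crypto_aead_setkey(aead, key, keylen);
	if (err)
		goto free_aead;
	err = crypto_aead_setauthsize(aead, tag_len);
	if (err)
		goto free_aead;

	req = aead_request_alloc(aead, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto free_aead;
	}

	/* One entry per region, mirroring the sg_in[3] in tls_enc_record(). */
	sg_init_table(sg, ARRAY_SIZE(sg));
	sg_set_buf(&sg[0], aad, aad_len);
	sg_set_buf(&sg[1], payload, payload_len);
	sg_set_buf(&sg[2], tag, tag_len);

	/* In-place encryption: dst == src, the tag is written into sg[2]. */
	aead_request_set_ad(req, aad_len);
	aead_request_set_crypt(req, sg, sg, payload_len, iv);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
free_aead:
	crypto_free_aead(aead);
	return err;
}

The real fallback code additionally walks the record payload out of skb fragments via chain_to_walk(); the fixed three-entry table here only covers the contiguous-buffer case.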
/linux-5.19.10/security/keys/encrypted-keys/encrypted.c:
    456  struct scatterlist sg_in[2];  in derived_key_encrypt() local
    472  sg_init_table(sg_in, 2);  in derived_key_encrypt()
    473  sg_set_buf(&sg_in[0], epayload->decrypted_data,  in derived_key_encrypt()
    475  sg_set_page(&sg_in[1], ZERO_PAGE(0), AES_BLOCK_SIZE, 0);  in derived_key_encrypt()
    481  skcipher_request_set_crypt(req, sg_in, sg_out, encrypted_datalen, iv);  in derived_key_encrypt()
    558  struct scatterlist sg_in[1];  in derived_key_decrypt() local
    579  sg_init_table(sg_in, 1);  in derived_key_decrypt()
    581  sg_set_buf(sg_in, epayload->encrypted_data, encrypted_datalen);  in derived_key_decrypt()
    587  skcipher_request_set_crypt(req, sg_in, sg_out, encrypted_datalen, iv);  in derived_key_decrypt()
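derived_key_encrypt() pads a non-block-aligned plaintext by pointing a second scatterlist entry at ZERO_PAGE(0) instead of copying zeroes into a bounce buffer. A minimal sketch of that trick with the skcipher API follows; demo_cbc_encrypt and its parameters are illustrative, the tfm is assumed to be an already-keyed "cbc(aes)" transform, and the cipher buffer is assumed to hold at least the rounded-up length.

/*
 * Sketch only: CBC encryption where the zero padding comes from
 * ZERO_PAGE(0) via a second scatterlist entry.
 */
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>

static int demo_cbc_encrypt(struct crypto_skcipher *tfm,
			    u8 *plain, unsigned int plainlen,
			    u8 *cipher, u8 *iv)
{
	unsigned int padded = round_up(plainlen, AES_BLOCK_SIZE);
	struct scatterlist sg_in[2], sg_out[1];
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/*
	 * Entry 0: the real data; entry 1: zero padding. Only the
	 * padded - plainlen bytes of the ZERO_PAGE entry are consumed,
	 * because cryptlen (not the scatterlist length) bounds the walk.
	 */
	sg_init_table(sg_in, 2);
	sg_set_buf(&sg_in[0], plain, plainlen);
	sg_set_page(&sg_in[1], ZERO_PAGE(0), AES_BLOCK_SIZE, 0);

	sg_init_one(sg_out, cipher, padded);

	skcipher_request_set_crypt(req, sg_in, sg_out, padded, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
	return err;
}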
/linux-5.19.10/drivers/firmware/arm_scmi/virtio.c:
    236  struct scatterlist sg_in;  in scmi_vio_feed_vq_rx() local
    241  sg_init_one(&sg_in, msg->input, VIRTIO_SCMI_MAX_PDU_SIZE);  in scmi_vio_feed_vq_rx()
    245  rc = virtqueue_add_inbuf(vioch->vqueue, &sg_in, 1, msg, GFP_ATOMIC);  in scmi_vio_feed_vq_rx()
    491  struct scatterlist sg_in;  in virtio_send_message() local
    492  struct scatterlist *sgs[DESCRIPTORS_PER_TX_MSG] = { &sg_out, &sg_in };  in virtio_send_message()
    509  sg_init_one(&sg_in, msg->input, msg_response_size(xfer));  in virtio_send_message()
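Both hits follow the standard virtio split between driver-readable ("out") and driver-writable ("in") buffers: the RX path queues a single in-buffer with virtqueue_add_inbuf(), while the TX path posts a request/response pair through one virtqueue_add_sgs() call. A rough sketch of the TX-style round trip, with a hypothetical demo_msg structure standing in for the SCMI message bookkeeping:

/* Sketch only: one out sg (device reads) and one in sg (device writes). */
#include <linux/scatterlist.h>
#include <linux/virtio.h>

struct demo_msg {
	void *request;
	unsigned int request_len;
	void *response;
	unsigned int response_len;
};

static int demo_queue_roundtrip(struct virtqueue *vq, struct demo_msg *msg)
{
	struct scatterlist sg_out, sg_in;
	struct scatterlist *sgs[2] = { &sg_out, &sg_in };
	int rc;

	sg_init_one(&sg_out, msg->request, msg->request_len);
	sg_init_one(&sg_in, msg->response, msg->response_len);

	/* 1 out_sg followed by 1 in_sg, both attached to the same token. */
	rc = virtqueue_add_sgs(vq, sgs, 1, 1, msg, GFP_ATOMIC);
	if (rc)
		return rc;

	virtqueue_kick(vq);
	return 0;
}

For a receive-only buffer, as in scmi_vio_feed_vq_rx(), virtqueue_add_inbuf(vq, &sg_in, 1, msg, GFP_ATOMIC) is the single-direction equivalent.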
/linux-5.19.10/drivers/crypto/qce/dma.h:
     39  int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *sg_in,
/linux-5.19.10/drivers/crypto/rockchip/rk3288_crypto.c:
    155  struct scatterlist *sg_in, *sg_out;  in rk_unload_data() local
    157  sg_in = dev->aligned ? dev->sg_src : &dev->sg_tmp;  in rk_unload_data()
    158  dma_unmap_sg(dev->dev, sg_in, 1, DMA_TO_DEVICE);  in rk_unload_data()
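rk_unload_data() tears down the DMA mapping that the corresponding load path created; dma_unmap_sg() must be called with the same device, scatterlist, entry count and direction as the matching dma_map_sg(). A small sketch of that pairing (the crypto-engine programming step is only hinted at in a comment):

/* Sketch only: map a single-entry sg for device input, then unmap it. */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_dma_one_sg(struct device *dev, struct scatterlist *sg_in)
{
	dma_addr_t addr;

	if (dma_map_sg(dev, sg_in, 1, DMA_TO_DEVICE) != 1)
		return -ENOMEM;

	/* The engine would be programmed with this bus address and length. */
	addr = sg_dma_address(sg_in);
	dev_dbg(dev, "mapped %pad (%u bytes)\n", &addr, sg_dma_len(sg_in));

	/* Must mirror the map call: same device, sg, nents and direction. */
	dma_unmap_sg(dev, sg_in, 1, DMA_TO_DEVICE);
	return 0;
}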
/linux-5.19.10/drivers/md/dm-crypt.c:
     89  struct scatterlist sg_in[4];  member
    533  sg = crypt_get_sg_data(cc, dmreq->sg_in);  in crypt_iv_lmk_gen()
    683  sg = crypt_get_sg_data(cc, dmreq->sg_in);  in crypt_iv_tcw_gen()
    982  sg2 = crypt_get_sg_data(cc, dmreq->sg_in);  in crypt_iv_elephant()
   1324  sg_init_table(dmreq->sg_in, 4);  in crypt_convert_block_aead()
   1325  sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t));  in crypt_convert_block_aead()
   1326  sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size);  in crypt_convert_block_aead()
   1327  sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset);  in crypt_convert_block_aead()
   1328  sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size);  in crypt_convert_block_aead()
   1354  aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,  in crypt_convert_block_aead()
    [all …]
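crypt_convert_block_aead() lays out four scatterlist entries per sector: the little-endian sector number and the original IV as associated data, the data page itself, and the integrity tag. A sketch of that layout against the generic AEAD API is below; the helper name and parameters are illustrative, the request is assumed to be allocated and keyed elsewhere, and the sketch simply encrypts in place where dm-crypt keeps separate sg_in/sg_out tables.

/* Sketch only: the [sector | IV | data | tag] per-sector AEAD layout. */
#include <linux/scatterlist.h>
#include <crypto/aead.h>

static void demo_setup_aead_sector(struct aead_request *req,
				   struct scatterlist sg[4],
				   __le64 *sector, u8 *iv, unsigned int iv_size,
				   struct page *data_page, unsigned int data_off,
				   unsigned int sector_size,
				   u8 *tag, unsigned int tag_size)
{
	sg_init_table(sg, 4);
	sg_set_buf(&sg[0], sector, sizeof(*sector));		/* AAD: sector  */
	sg_set_buf(&sg[1], iv, iv_size);			/* AAD: IV      */
	sg_set_page(&sg[2], data_page, sector_size, data_off);	/* payload      */
	sg_set_buf(&sg[3], tag, tag_size);			/* auth tag     */

	/*
	 * The first two entries are associated data only; sg[2] is what
	 * gets encrypted. The same IV is both authenticated (sg[1]) and
	 * passed as the request IV here.
	 */
	aead_request_set_ad(req, sizeof(*sector) + iv_size);
	aead_request_set_crypt(req, sg, sg, sector_size, iv);
}

After this setup, crypto_aead_encrypt(req) processes one sector and writes the authentication tag into the fourth entry.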
/linux-5.19.10/include/crypto/drbg.h:
    134  struct scatterlist sg_in, sg_out; /* CTR mode SGLs */  member
/linux-5.19.10/crypto/drbg.c:
   1812  sg_init_table(&drbg->sg_in, 1);  in drbg_init_sym_kernel()
   1843  struct scatterlist *sg_in = &drbg->sg_in, *sg_out = &drbg->sg_out;  in drbg_kcapi_sym_ctr() local
   1849  sg_set_buf(sg_in, inbuf, inlen);  in drbg_kcapi_sym_ctr()
   1854  sg_set_buf(sg_in, drbg->outscratchpad, scratchpad_use);  in drbg_kcapi_sym_ctr()
   1861  skcipher_request_set_crypt(drbg->ctr_req, sg_in, sg_out,  in drbg_kcapi_sym_ctr()
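drbg_init_sym_kernel() initialises the one-entry scatterlists once, and drbg_kcapi_sym_ctr() then only re-points them with sg_set_buf() for each CTR request, so no per-call scatterlist setup or allocation is needed. A minimal sketch of that reuse pattern; struct demo_ctr_state and the function names are assumptions, and the counter buffer plays the role of drbg->V.

/* Sketch only: one-time sg setup, per-call re-pointing via sg_set_buf(). */
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

struct demo_ctr_state {
	struct skcipher_request *req;
	struct scatterlist sg_in, sg_out;
	struct crypto_wait wait;
};

static int demo_ctr_init(struct demo_ctr_state *st, struct crypto_skcipher *tfm)
{
	st->req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!st->req)
		return -ENOMEM;

	crypto_init_wait(&st->wait);
	skcipher_request_set_callback(st->req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &st->wait);

	/* One-time setup: single-entry tables, re-pointed on every call. */
	sg_init_table(&st->sg_in, 1);
	sg_init_table(&st->sg_out, 1);
	return 0;
}

static int demo_ctr_crypt(struct demo_ctr_state *st, const u8 *inbuf,
			  u8 *outbuf, u32 len, u8 *counter)
{
	/* Re-point the existing entries at this call's buffers. */
	sg_set_buf(&st->sg_in, inbuf, len);
	sg_set_buf(&st->sg_out, outbuf, len);

	skcipher_request_set_crypt(st->req, &st->sg_in, &st->sg_out,
				   len, counter);
	return crypto_wait_req(crypto_skcipher_encrypt(st->req), &st->wait);
}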