
Searched refs:sreq (Results 1 – 17 of 17) sorted by relevance

/linux-6.6.21/include/trace/events/
netfs.h 188 TP_PROTO(struct netfs_io_subrequest *sreq,
191 TP_ARGS(sreq, what),
206 __entry->rreq = sreq->rreq->debug_id;
207 __entry->index = sreq->debug_index;
208 __entry->error = sreq->error;
209 __entry->flags = sreq->flags;
210 __entry->source = sreq->source;
212 __entry->len = sreq->len;
213 __entry->transferred = sreq->transferred;
214 __entry->start = sreq->start;
[all …]
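
The hits above are the assignment lines of the netfs_sreq trace event: each firing snapshots the subrequest's identifiers, error, flags, source, and byte accounting into the trace buffer. A minimal userspace sketch of that capture, using a simplified stand-in struct rather than the kernel's netfs_io_subrequest:

#include <stdio.h>

/* Simplified stand-ins for the fields the trace event copies out;
 * these are not the kernel definitions. */
struct sreq_snapshot {
    unsigned int rreq;          /* parent request debug_id */
    unsigned int index;         /* sreq->debug_index */
    int error;
    unsigned long flags;
    unsigned int source;        /* where the data comes from */
    size_t len, transferred;
    unsigned long long start;   /* byte offset in the file */
};

static void trace_sreq(const struct sreq_snapshot *e)
{
    printf("R=%08x[%u] e=%d f=%02lx s=%u %llu+%zu/%zu\n",
           e->rreq, e->index, e->error, e->flags, e->source,
           e->start, e->transferred, e->len);
}

int main(void)
{
    struct sreq_snapshot s = { .rreq = 0x2a, .index = 1, .len = 4096 };
    trace_sreq(&s);
    return 0;
}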
/linux-6.6.21/fs/nfs/
fscache.c 283 static struct nfs_netfs_io_data *nfs_netfs_alloc(struct netfs_io_subrequest *sreq) in nfs_netfs_alloc() argument
290 netfs->sreq = sreq; in nfs_netfs_alloc()
295 static bool nfs_netfs_clamp_length(struct netfs_io_subrequest *sreq) in nfs_netfs_clamp_length() argument
297 size_t rsize = NFS_SB(sreq->rreq->inode->i_sb)->rsize; in nfs_netfs_clamp_length()
299 sreq->len = min(sreq->len, rsize); in nfs_netfs_clamp_length()
303 static void nfs_netfs_issue_read(struct netfs_io_subrequest *sreq) in nfs_netfs_issue_read() argument
307 struct inode *inode = sreq->rreq->inode; in nfs_netfs_issue_read()
308 struct nfs_open_context *ctx = sreq->rreq->netfs_priv; in nfs_netfs_issue_read()
311 pgoff_t start = (sreq->start + sreq->transferred) >> PAGE_SHIFT; in nfs_netfs_issue_read()
312 pgoff_t last = ((sreq->start + sreq->len - in nfs_netfs_issue_read()
[all …]
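
nfs_netfs_clamp_length() and nfs_netfs_issue_read() above perform two small calculations: capping the subrequest length at the mount's rsize, and converting the outstanding byte range into page indices. A userspace sketch of both follows; PAGE_SHIFT is assumed to be 12 (4 KiB pages), and the last-page expression completes the truncated line above in the obvious way (subtracting 1 before shifting), which should be treated as an assumption:

#include <stdio.h>
#include <stddef.h>

#define PAGE_SHIFT 12   /* assumed 4 KiB pages */

static size_t min_size(size_t a, size_t b) { return a < b ? a : b; }

int main(void)
{
    unsigned long long start = 10000;   /* sreq->start */
    size_t len = 1 << 20;               /* sreq->len before clamping */
    size_t transferred = 0;             /* sreq->transferred */
    size_t rsize = 65536;               /* NFS_SB(...)->rsize */

    len = min_size(len, rsize);         /* sreq->len = min(sreq->len, rsize) */

    unsigned long long first = (start + transferred) >> PAGE_SHIFT;
    unsigned long long last = (start + len - 1) >> PAGE_SHIFT;
    printf("clamped len=%zu, pages %llu..%llu\n", len, first, last);
    return 0;
}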
fscache.h 46 struct netfs_io_subrequest *sreq; member
77 final_len = min_t(s64, netfs->sreq->len, atomic64_read(&netfs->transferred)); in nfs_netfs_put()
78 netfs_subreq_terminated(netfs->sreq, netfs->error ?: final_len, false); in nfs_netfs_put()
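
nfs_netfs_put() above folds an error and a byte count into one status argument using the GNU a ?: b extension, which yields a when it is non-zero and b otherwise. A small sketch of that convention, written portably:

#include <stdio.h>

/* error ?: final_len, spelled out: a negative errno wins, otherwise
 * the status carries the transferred length. */
static long completion_status(int error, long final_len)
{
    return error ? error : final_len;
}

int main(void)
{
    printf("%ld %ld\n",
           completion_status(-5, 4096),   /* -5: the errno wins */
           completion_status(0, 4096));   /* 4096: bytes transferred */
    return 0;
}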
/linux-6.6.21/drivers/crypto/marvell/cesa/
cipher.c 86 struct mv_cesa_skcipher_std_req *sreq = &creq->std; in mv_cesa_skcipher_std_step() local
88 size_t len = min_t(size_t, req->cryptlen - sreq->offset, in mv_cesa_skcipher_std_step()
91 mv_cesa_adjust_op(engine, &sreq->op); in mv_cesa_skcipher_std_step()
93 memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
95 memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
99 sreq->offset); in mv_cesa_skcipher_std_step()
101 sreq->size = len; in mv_cesa_skcipher_std_step()
102 mv_cesa_set_crypt_op_len(&sreq->op, len); in mv_cesa_skcipher_std_step()
105 if (!sreq->skip_ctx) { in mv_cesa_skcipher_std_step()
107 memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
[all …]
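
mv_cesa_skcipher_std_step() above walks the request in SRAM-sized chunks: each step takes min(cryptlen - offset, payload space), copies it into the engine's SRAM, and records the chunk length. A userspace sketch of that loop shape; MAX_PAYLOAD is an illustrative constant, not the driver's real SRAM payload size:

#include <stdio.h>
#include <stddef.h>

#define MAX_PAYLOAD 2048   /* stand-in for the SRAM payload space */

int main(void)
{
    size_t cryptlen = 5000, offset = 0;

    while (offset < cryptlen) {
        /* len = min(req->cryptlen - sreq->offset, payload space) */
        size_t len = cryptlen - offset;
        if (len > MAX_PAYLOAD)
            len = MAX_PAYLOAD;

        /* ... here the driver copies the chunk into SRAM, programs
         * the op length, and kicks the engine ... */
        printf("step: %zu bytes at offset %zu\n", len, offset);
        offset += len;   /* the driver advances sreq->offset per step */
    }
    return 0;
}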
hash.c 161 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_step() local
178 if (!sreq->offset) { in mv_cesa_ahash_std_step()
194 len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset, in mv_cesa_ahash_std_step()
203 sreq->offset += mv_cesa_sg_copy_to_sram( in mv_cesa_ahash_std_step()
206 len - creq->cache_ptr, sreq->offset); in mv_cesa_ahash_std_step()
212 if (creq->last_req && sreq->offset == req->nbytes && in mv_cesa_ahash_std_step()
287 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_process() local
289 if (sreq->offset < (req->nbytes - creq->cache_ptr)) in mv_cesa_ahash_std_process()
306 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_prepare() local
308 sreq->offset = 0; in mv_cesa_ahash_std_prepare()
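
The ahash variant above keeps the same offset bookkeeping but folds in bytes cached from a previous partial block: each chunk is min(nbytes + cache_ptr - offset, payload), and mv_cesa_ahash_std_process() re-queues the step while input remains. A sketch of that pending-input test, with illustrative names:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Mirrors the re-queue check in mv_cesa_ahash_std_process(): keep
 * stepping while unprocessed input remains. */
static bool hash_step_pending(size_t offset, size_t nbytes, size_t cache_ptr)
{
    return offset < nbytes - cache_ptr;
}

int main(void)
{
    printf("%d\n", hash_step_pending(100, 200, 50));   /* 1: more to do */
    return 0;
}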
/linux-6.6.21/drivers/crypto/inside-secure/
safexcel_cipher.c 507 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
529 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
544 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
549 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
559 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
612 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
623 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
626 while (sreq->rdescs--) { in safexcel_handle_req_result()
644 if (sreq->nr_src > 0) in safexcel_handle_req_result()
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_handle_req_result()
[all …]
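
safexcel_handle_req_result() above drains one ring result descriptor per descriptor it submitted (the while (sreq->rdescs--) loop) and only then unmaps the source scatterlist. A simulated sketch of that drain-then-unmap ordering:

#include <stdio.h>

/* Sketch only: the descriptor pop and unmap are simulated prints. */
static void handle_result(int rdescs, int nr_src)
{
    if (rdescs <= 0)            /* unlikely(!sreq->rdescs): nothing to do */
        return;
    while (rdescs--)            /* one result descriptor per submission */
        printf("pop result descriptor\n");
    if (nr_src > 0)
        printf("dma_unmap_sg() the source scatterlist\n");
}

int main(void)
{
    handle_result(3, 1);
    return 0;
}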
safexcel_hash.c 234 struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq); in safexcel_handle_req_result() local
251 if (sreq->nents) { in safexcel_handle_req_result()
252 dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE); in safexcel_handle_req_result()
253 sreq->nents = 0; in safexcel_handle_req_result()
256 if (sreq->result_dma) { in safexcel_handle_req_result()
257 dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz, in safexcel_handle_req_result()
259 sreq->result_dma = 0; in safexcel_handle_req_result()
262 if (sreq->cache_dma) { in safexcel_handle_req_result()
263 dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz, in safexcel_handle_req_result()
265 sreq->cache_dma = 0; in safexcel_handle_req_result()
[all …]
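
The hash completion path above uses a consistent unmap-then-clear idiom: each DMA handle is released once and immediately zeroed (nents, result_dma, cache_dma) so that a repeated cleanup pass is a no-op. A sketch of the idempotent cleanup, with stand-in fields and the unmap calls reduced to comments:

#include <stdint.h>

struct req_state {            /* stand-ins for sreq's DMA bookkeeping */
    int nents;
    uintptr_t result_dma;
    uintptr_t cache_dma;
};

static void cleanup(struct req_state *s)
{
    if (s->nents) {
        /* dma_unmap_sg(...) would go here */
        s->nents = 0;         /* guard against a second unmap */
    }
    if (s->result_dma) {
        /* dma_unmap_single(...) for the digest */
        s->result_dma = 0;
    }
    if (s->cache_dma) {
        /* dma_unmap_single(...) for the partial-block cache */
        s->cache_dma = 0;
    }
}

int main(void)
{
    struct req_state s = { .nents = 2, .result_dma = 0x1000, .cache_dma = 0 };
    cleanup(&s);
    cleanup(&s);   /* second pass is a no-op */
    return 0;
}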
/linux-6.6.21/drivers/net/wireless/mediatek/mt76/
mt76_connac_mcu.c 1638 struct cfg80211_scan_request *sreq = &scan_req->req; in mt76_connac_mcu_hw_scan() local
1640 int ext_channels_num = max_t(int, sreq->n_channels - 32, 0); in mt76_connac_mcu_hw_scan()
1641 struct ieee80211_channel **scan_list = sreq->channels; in mt76_connac_mcu_hw_scan()
1661 req->scan_type = sreq->n_ssids ? 1 : 0; in mt76_connac_mcu_hw_scan()
1662 req->probe_req_num = sreq->n_ssids ? 2 : 0; in mt76_connac_mcu_hw_scan()
1665 for (i = 0; i < sreq->n_ssids; i++) { in mt76_connac_mcu_hw_scan()
1666 if (!sreq->ssids[i].ssid_len) in mt76_connac_mcu_hw_scan()
1669 req->ssids[i].ssid_len = cpu_to_le32(sreq->ssids[i].ssid_len); in mt76_connac_mcu_hw_scan()
1670 memcpy(req->ssids[i].ssid, sreq->ssids[i].ssid, in mt76_connac_mcu_hw_scan()
1671 sreq->ssids[i].ssid_len); in mt76_connac_mcu_hw_scan()
[all …]
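
mt76_connac_mcu_hw_scan() above translates a cfg80211 scan request into firmware fields: channels beyond the first 32 go to an extension list, the scan type and probe-request count key off whether any SSIDs were supplied, and zero-length SSID slots are skipped during the copy. A sketch of those three derivations; MAX_SSID_LEN is illustrative:

#include <stdio.h>

#define MAX_SSID_LEN 32   /* illustrative bound */

struct ssid { unsigned char ssid[MAX_SSID_LEN]; int ssid_len; };

int main(void)
{
    int n_channels = 40;
    struct ssid ssids[] = { { "net-a", 5 }, { "", 0 }, { "net-b", 5 } };
    int n_ssids = 3;

    /* ext_channels_num = max(n_channels - 32, 0) */
    int ext_channels = n_channels > 32 ? n_channels - 32 : 0;

    /* the 1/0 and 2/0 choices mirror the ?: expressions above */
    int scan_type = n_ssids ? 1 : 0;
    int probe_req_num = n_ssids ? 2 : 0;

    int copied = 0;
    for (int i = 0; i < n_ssids; i++) {
        if (!ssids[i].ssid_len)    /* skip zero-length entries */
            continue;
        copied++;                  /* here the SSID would be memcpy'd */
    }
    printf("ext=%d type=%d probes=%d ssids=%d\n",
           ext_channels, scan_type, probe_req_num, copied);
    return 0;
}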
mt76_connac_mcu.h 1862 struct cfg80211_sched_scan_request *sreq);
/linux-6.6.21/drivers/crypto/hisilicon/sec2/
sec_crypto.c 2010 struct sec_req *sreq) in sec_skcipher_cryptlen_check() argument
2012 u32 cryptlen = sreq->c_req.sk_req->cryptlen; in sec_skcipher_cryptlen_check()
2046 static int sec_skcipher_param_check(struct sec_ctx *ctx, struct sec_req *sreq) in sec_skcipher_param_check() argument
2048 struct skcipher_request *sk_req = sreq->c_req.sk_req; in sec_skcipher_param_check()
2057 sreq->c_req.c_len = sk_req->cryptlen; in sec_skcipher_param_check()
2060 sreq->use_pbuf = true; in sec_skcipher_param_check()
2062 sreq->use_pbuf = false; in sec_skcipher_param_check()
2071 return sec_skcipher_cryptlen_check(ctx, sreq); in sec_skcipher_param_check()
2080 struct skcipher_request *sreq, bool encrypt) in sec_skcipher_soft_crypto() argument
2095 skcipher_request_set_callback(subreq, sreq->base.flags, in sec_skcipher_soft_crypto()
[all …]
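
sec_skcipher_param_check() above records cryptlen as c_len and chooses between a bounce buffer (use_pbuf) and scatter-gather before deferring to sec_skcipher_cryptlen_check(). The size threshold is not visible in the hits, so SEC_PBUF_SZ below is an assumed name and value, shown only to illustrate the shape of the decision:

#include <stdbool.h>
#include <stddef.h>

#define SEC_PBUF_SZ 512   /* assumed small-request threshold */

struct params { size_t c_len; bool use_pbuf; };

static int param_check(struct params *p, size_t cryptlen)
{
    p->c_len = cryptlen;                    /* sreq->c_req.c_len = cryptlen */
    p->use_pbuf = cryptlen <= SEC_PBUF_SZ;  /* small: copy via the pbuf */
    return 0;                               /* then the cryptlen check runs */
}

int main(void)
{
    struct params p;
    return param_check(&p, 256);    /* small request: p.use_pbuf == true */
}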
/linux-6.6.21/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c 147 struct skcipher_request *sreq; in output_iv_copyback() local
153 sreq = container_of(areq, struct skcipher_request, base); in output_iv_copyback()
154 stfm = crypto_skcipher_reqtfm(sreq); in output_iv_copyback()
158 rctx = skcipher_request_ctx_dma(sreq); in output_iv_copyback()
161 start = sreq->cryptlen - ivsize; in output_iv_copyback()
164 scatterwalk_map_and_copy(sreq->iv, sreq->dst, start, in output_iv_copyback()
167 if (sreq->src != sreq->dst) { in output_iv_copyback()
168 scatterwalk_map_and_copy(sreq->iv, sreq->src, in output_iv_copyback()
171 memcpy(sreq->iv, req_info->iv_out, ivsize); in output_iv_copyback()
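
output_iv_copyback() above refreshes req->iv after the operation so that chained requests see the correct IV: the final block is lifted out of the scatterlist at offset cryptlen - ivsize (from dst, or from src when the two differ, depending on direction), or taken from the engine-reported iv_out. The octeontx2 listing below repeats the same pattern. A flat-buffer sketch of the offset arithmetic, with a plain array standing in for the scatterlist:

#include <stdio.h>
#include <string.h>

#define IVSIZE 16

int main(void)
{
    unsigned char buf[64];    /* stand-in for the dst scatterlist */
    unsigned char iv[IVSIZE];
    size_t cryptlen = sizeof(buf);

    for (size_t i = 0; i < sizeof(buf); i++)
        buf[i] = (unsigned char)i;

    /* start = sreq->cryptlen - ivsize; copy the final block out */
    size_t start = cryptlen - IVSIZE;
    memcpy(iv, buf + start, IVSIZE);  /* scatterwalk_map_and_copy() analogue */

    printf("next IV starts at byte %zu (first byte 0x%02x)\n", start, iv[0]);
    return 0;
}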
/linux-6.6.21/drivers/crypto/marvell/octeontx2/
otx2_cptvf_algs.c 130 struct skcipher_request *sreq; in output_iv_copyback() local
135 sreq = container_of(areq, struct skcipher_request, base); in output_iv_copyback()
136 stfm = crypto_skcipher_reqtfm(sreq); in output_iv_copyback()
140 rctx = skcipher_request_ctx_dma(sreq); in output_iv_copyback()
143 start = sreq->cryptlen - ivsize; in output_iv_copyback()
146 scatterwalk_map_and_copy(sreq->iv, sreq->dst, start, in output_iv_copyback()
149 if (sreq->src != sreq->dst) { in output_iv_copyback()
150 scatterwalk_map_and_copy(sreq->iv, sreq->src, in output_iv_copyback()
153 memcpy(sreq->iv, req_info->iv_out, ivsize); in output_iv_copyback()
/linux-6.6.21/drivers/crypto/intel/qat/qat_common/
qat_algs.c 683 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_alg_update_iv_ctr_mode() local
688 memcpy(qat_req->iv, sreq->iv, AES_BLOCK_SIZE); in qat_alg_update_iv_ctr_mode()
694 iv_lo += DIV_ROUND_UP(sreq->cryptlen, AES_BLOCK_SIZE); in qat_alg_update_iv_ctr_mode()
704 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_alg_update_iv_cbc_mode() local
705 int offset = sreq->cryptlen - AES_BLOCK_SIZE; in qat_alg_update_iv_cbc_mode()
709 sgl = sreq->dst; in qat_alg_update_iv_cbc_mode()
711 sgl = sreq->src; in qat_alg_update_iv_cbc_mode()
741 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_skcipher_alg_callback() local
752 memcpy(sreq->iv, qat_req->iv, AES_BLOCK_SIZE); in qat_skcipher_alg_callback()
754 skcipher_request_complete(sreq, res); in qat_skcipher_alg_callback()
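
qat_alg_update_iv_ctr_mode() above advances the counter by the number of AES blocks the request consumed, adding DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) to the low half, while the CBC variant re-reads the block at cryptlen - AES_BLOCK_SIZE from the appropriate scatterlist. A sketch of the CTR arithmetic; carry into the high counter half is omitted here, as in the visible lines:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* iv_lo += DIV_ROUND_UP(sreq->cryptlen, AES_BLOCK_SIZE) */
static unsigned long long ctr_advance(unsigned long long iv_lo,
                                      unsigned long cryptlen)
{
    return iv_lo + DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE);
}

int main(void)
{
    /* a 100-byte request consumes 7 blocks (ceil(100/16)) */
    printf("%llu\n", ctr_advance(5, 100));   /* prints 12 */
    return 0;
}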
/linux-6.6.21/drivers/crypto/starfive/
jh7110-aes.c 301 starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv); in starfive_aes_hw_init()
349 starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv); in starfive_aes_finish_req()
359 crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq, in starfive_aes_finish_req()
478 cryp->req.sreq = req; in starfive_aes_prepare_req()
jh7110-cryp.h 200 struct skcipher_request *sreq; member
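
The StarFive driver above stashes the skcipher request in cryp->req.sreq at prepare time, programs the hardware IV registers from sreq->iv before the operation, and reads the updated IV back into sreq->iv before completing. A sketch of that IV round trip, with a plain array standing in for the registers:

#include <stdio.h>
#include <string.h>

#define IVSIZE 16

static unsigned char hw_iv_regs[IVSIZE];   /* stand-in for the IV registers */

static void write_iv(const unsigned char *iv) { memcpy(hw_iv_regs, iv, IVSIZE); }
static void get_iv(unsigned char *iv)        { memcpy(iv, hw_iv_regs, IVSIZE); }

int main(void)
{
    unsigned char iv[IVSIZE] = { 1, 2, 3 };

    write_iv(iv);          /* starfive_aes_write_iv(...) at hw_init */
    hw_iv_regs[0] ^= 0xff; /* pretend the engine advanced the IV */
    get_iv(iv);            /* starfive_aes_get_iv(...) at finish_req */
    printf("iv[0]=0x%02x\n", iv[0]);
    return 0;
}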
/linux-6.6.21/include/linux/netfilter/
nf_conntrack_pptp.h 288 struct PptpStartSessionRequest sreq; member
/linux-6.6.21/arch/s390/kernel/
perf_cpum_sf.c 159 struct hws_lsctl_request_block sreq; in sf_disable() local
161 memset(&sreq, 0, sizeof(sreq)); in sf_disable()
162 return lsctl(&sreq); in sf_disable()
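
sf_disable() above relies on an all-zero hws_lsctl_request_block being a valid "stop sampling" request: it clears one on the stack and submits it with lsctl(). A sketch of that zero-initialise-and-submit shape, with stand-in types and a stub in place of the real instruction:

#include <string.h>

struct lsctl_block { unsigned long flags, es, cs; };  /* stand-in fields */

static int lsctl_stub(struct lsctl_block *req)
{
    /* the real helper issues the sampling-control instruction */
    return req->es || req->cs ? -1 : 0;
}

static int sf_disable_sketch(void)
{
    struct lsctl_block sreq;

    memset(&sreq, 0, sizeof(sreq));   /* all-zero: disable sampling */
    return lsctl_stub(&sreq);
}

int main(void)
{
    return sf_disable_sketch();
}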