
Searched refs:subreq (Results 1 – 25 of 46) sorted by relevance


/linux-5.19.10/fs/netfs/
io.c
22 static void netfs_clear_unread(struct netfs_io_subrequest *subreq) in netfs_clear_unread() argument
26 iov_iter_xarray(&iter, READ, &subreq->rreq->mapping->i_pages, in netfs_clear_unread()
27 subreq->start + subreq->transferred, in netfs_clear_unread()
28 subreq->len - subreq->transferred); in netfs_clear_unread()
35 struct netfs_io_subrequest *subreq = priv; in netfs_cache_read_terminated() local
37 netfs_subreq_terminated(subreq, transferred_or_error, was_async); in netfs_cache_read_terminated()
45 struct netfs_io_subrequest *subreq, in netfs_read_from_cache() argument
53 subreq->start + subreq->transferred, in netfs_read_from_cache()
54 subreq->len - subreq->transferred); in netfs_read_from_cache()
56 cres->ops->read(cres, subreq->start, &iter, read_hole, in netfs_read_from_cache()
[all …]
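
The io.c hits only show netfs_clear_unread() in truncated form. Reconstructed from the visible lines, the helper zeroes whatever part of a subrequest the server did not deliver; the iov_iter_zero()/iov_iter_count() calls below are an assumption about the elided body, not a verbatim copy of the tree:

static void netfs_clear_unread(struct netfs_io_subrequest *subreq)
{
        struct iov_iter iter;

        /* Wrap the pagecache pages backing the unread tail of the
         * subrequest, i.e. [start + transferred, start + len). */
        iov_iter_xarray(&iter, READ, &subreq->rreq->mapping->i_pages,
                        subreq->start + subreq->transferred,
                        subreq->len - subreq->transferred);

        /* Zero that range so a short read never exposes stale data. */
        iov_iter_zero(iov_iter_count(&iter), &iter);
}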
objects.c
62 struct netfs_io_subrequest *subreq; in netfs_clear_subrequests() local
65 subreq = list_first_entry(&rreq->subrequests, in netfs_clear_subrequests()
67 list_del(&subreq->rreq_link); in netfs_clear_subrequests()
68 netfs_put_subrequest(subreq, was_async, in netfs_clear_subrequests()
113 struct netfs_io_subrequest *subreq; in netfs_alloc_subrequest() local
115 subreq = kzalloc(sizeof(struct netfs_io_subrequest), GFP_KERNEL); in netfs_alloc_subrequest()
116 if (subreq) { in netfs_alloc_subrequest()
117 INIT_LIST_HEAD(&subreq->rreq_link); in netfs_alloc_subrequest()
118 refcount_set(&subreq->ref, 2); in netfs_alloc_subrequest()
119 subreq->rreq = rreq; in netfs_alloc_subrequest()
[all …]
buffered_read.c
18 struct netfs_io_subrequest *subreq; in netfs_rreq_unlock_folios() local
29 list_for_each_entry(subreq, &rreq->subrequests, rreq_link) { in netfs_rreq_unlock_folios()
30 __clear_bit(NETFS_SREQ_COPY_TO_CACHE, &subreq->flags); in netfs_rreq_unlock_folios()
40 subreq = list_first_entry(&rreq->subrequests, in netfs_rreq_unlock_folios()
43 subreq_failed = (subreq->error < 0); in netfs_rreq_unlock_folios()
54 if (!subreq) { in netfs_rreq_unlock_folios()
58 if (test_bit(NETFS_SREQ_COPY_TO_CACHE, &subreq->flags)) in netfs_rreq_unlock_folios()
61 if (pgend < iopos + subreq->len) in netfs_rreq_unlock_folios()
64 account += subreq->transferred; in netfs_rreq_unlock_folios()
65 iopos += subreq->len; in netfs_rreq_unlock_folios()
[all …]
/linux-5.19.10/fs/erofs/
fscache.c
35 static void erofs_fscache_put_subrequest(struct netfs_io_subrequest *subreq) in erofs_fscache_put_subrequest() argument
37 if (!refcount_dec_and_test(&subreq->ref)) in erofs_fscache_put_subrequest()
39 erofs_fscache_put_request(subreq->rreq); in erofs_fscache_put_subrequest()
40 kfree(subreq); in erofs_fscache_put_subrequest()
45 struct netfs_io_subrequest *subreq; in erofs_fscache_clear_subrequests() local
48 subreq = list_first_entry(&rreq->subrequests, in erofs_fscache_clear_subrequests()
50 list_del(&subreq->rreq_link); in erofs_fscache_clear_subrequests()
51 erofs_fscache_put_subrequest(subreq); in erofs_fscache_clear_subrequests()
57 struct netfs_io_subrequest *subreq; in erofs_fscache_rreq_unlock_folios() local
66 subreq = list_first_entry(&rreq->subrequests, in erofs_fscache_rreq_unlock_folios()
[all …]
/linux-5.19.10/crypto/
cts.c
62 struct skcipher_request subreq; member
102 struct skcipher_request *subreq = &rctx->subreq; in cts_cbc_encrypt() local
121 skcipher_request_set_callback(subreq, req->base.flags & in cts_cbc_encrypt()
124 skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv); in cts_cbc_encrypt()
125 return crypto_skcipher_encrypt(subreq); in cts_cbc_encrypt()
148 struct skcipher_request *subreq = &rctx->subreq; in crypto_cts_encrypt() local
153 skcipher_request_set_tfm(subreq, ctx->child); in crypto_cts_encrypt()
159 skcipher_request_set_callback(subreq, req->base.flags, in crypto_cts_encrypt()
162 skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes, in crypto_cts_encrypt()
164 return crypto_skcipher_encrypt(subreq); in crypto_cts_encrypt()
[all …]
seqiv.c
23 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt_complete2() local
33 memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv)); in seqiv_aead_encrypt_complete2()
36 kfree_sensitive(subreq->iv); in seqiv_aead_encrypt_complete2()
52 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt() local
62 aead_request_set_tfm(subreq, ctx->child); in seqiv_aead_encrypt()
95 aead_request_set_callback(subreq, req->base.flags, compl, data); in seqiv_aead_encrypt()
96 aead_request_set_crypt(subreq, req->dst, req->dst, in seqiv_aead_encrypt()
98 aead_request_set_ad(subreq, req->assoclen + ivsize); in seqiv_aead_encrypt()
103 err = crypto_aead_encrypt(subreq); in seqiv_aead_encrypt()
113 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_decrypt() local
[all …]
echainiv.c
30 struct aead_request *subreq = aead_request_ctx(req); in echainiv_encrypt() local
40 aead_request_set_tfm(subreq, ctx->child); in echainiv_encrypt()
59 aead_request_set_callback(subreq, req->base.flags, in echainiv_encrypt()
61 aead_request_set_crypt(subreq, req->dst, req->dst, in echainiv_encrypt()
63 aead_request_set_ad(subreq, req->assoclen); in echainiv_encrypt()
82 return crypto_aead_encrypt(subreq); in echainiv_encrypt()
89 struct aead_request *subreq = aead_request_ctx(req); in echainiv_decrypt() local
97 aead_request_set_tfm(subreq, ctx->child); in echainiv_decrypt()
102 aead_request_set_callback(subreq, req->base.flags, compl, data); in echainiv_decrypt()
103 aead_request_set_crypt(subreq, req->src, req->dst, in echainiv_decrypt()
[all …]
xts.c
38 struct skcipher_request subreq; member
94 req = &rctx->subreq; in xts_xor_tweak()
166 struct skcipher_request *subreq = &rctx->subreq; in xts_cts_final() local
182 skcipher_request_set_tfm(subreq, ctx->child); in xts_cts_final()
183 skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done, in xts_cts_final()
185 skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail, in xts_cts_final()
188 err = crypt(subreq); in xts_cts_final()
206 rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in xts_encrypt_done()
226 rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in xts_decrypt_done()
245 struct skcipher_request *subreq = &rctx->subreq; in xts_init_crypt() local
[all …]
simd.c
66 struct skcipher_request *subreq; in simd_skcipher_encrypt() local
69 subreq = skcipher_request_ctx(req); in simd_skcipher_encrypt()
70 *subreq = *req; in simd_skcipher_encrypt()
78 skcipher_request_set_tfm(subreq, child); in simd_skcipher_encrypt()
80 return crypto_skcipher_encrypt(subreq); in simd_skcipher_encrypt()
87 struct skcipher_request *subreq; in simd_skcipher_decrypt() local
90 subreq = skcipher_request_ctx(req); in simd_skcipher_decrypt()
91 *subreq = *req; in simd_skcipher_decrypt()
99 skcipher_request_set_tfm(subreq, child); in simd_skcipher_decrypt()
101 return crypto_skcipher_decrypt(subreq); in simd_skcipher_decrypt()
[all …]
lrw.c
54 struct skcipher_request subreq; member
153 req = &rctx->subreq; in lrw_xor_tweak()
215 rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in lrw_crypt_done()
227 struct skcipher_request *subreq = &rctx->subreq; in lrw_init_crypt() local
229 skcipher_request_set_tfm(subreq, ctx->child); in lrw_init_crypt()
230 skcipher_request_set_callback(subreq, req->base.flags, lrw_crypt_done, in lrw_init_crypt()
233 skcipher_request_set_crypt(subreq, req->dst, req->dst, in lrw_init_crypt()
246 struct skcipher_request *subreq = &rctx->subreq; in lrw_encrypt() local
250 crypto_skcipher_encrypt(subreq) ?: in lrw_encrypt()
257 struct skcipher_request *subreq = &rctx->subreq; in lrw_decrypt() local
[all …]
essiv.c
145 struct skcipher_request *subreq = skcipher_request_ctx(req); in essiv_skcipher_crypt() local
149 skcipher_request_set_tfm(subreq, tctx->u.skcipher); in essiv_skcipher_crypt()
150 skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in essiv_skcipher_crypt()
152 skcipher_request_set_callback(subreq, skcipher_request_flags(req), in essiv_skcipher_crypt()
155 return enc ? crypto_skcipher_encrypt(subreq) : in essiv_skcipher_crypt()
156 crypto_skcipher_decrypt(subreq); in essiv_skcipher_crypt()
183 struct aead_request *subreq = &rctx->aead_req; in essiv_aead_crypt() local
241 aead_request_set_tfm(subreq, tctx->u.aead); in essiv_aead_crypt()
242 aead_request_set_ad(subreq, req->assoclen); in essiv_aead_crypt()
243 aead_request_set_callback(subreq, aead_request_flags(req), in essiv_aead_crypt()
[all …]
ctr.c
25 struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
198 struct skcipher_request *subreq = &rctx->subreq; in crypto_rfc3686_crypt() local
209 skcipher_request_set_tfm(subreq, child); in crypto_rfc3686_crypt()
210 skcipher_request_set_callback(subreq, req->base.flags, in crypto_rfc3686_crypt()
212 skcipher_request_set_crypt(subreq, req->src, req->dst, in crypto_rfc3686_crypt()
215 return crypto_skcipher_encrypt(subreq); in crypto_rfc3686_crypt()
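
All of the crypto/ hits above share one template: the wrapping algorithm embeds a struct skcipher_request subreq in its request context, retargets it at the child transform, copies the I/O parameters across, and forwards the call. A minimal sketch of that pattern; wrap_req_ctx and wrap_crypt are illustrative names, not identifiers from the tree, and the wrapper is assumed to have reserved enough context space with crypto_skcipher_set_reqsize() at init:

struct wrap_req_ctx {
        struct skcipher_request subreq;
};

static int wrap_crypt(struct skcipher_request *req,
                      struct crypto_skcipher *child, bool enc)
{
        struct wrap_req_ctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;

        skcipher_request_set_tfm(subreq, child);
        /* Forward the caller's completion, as essiv.c does; cts.c and
         * xts.c install their own callbacks here instead. */
        skcipher_request_set_callback(subreq, req->base.flags,
                                      req->base.complete, req->base.data);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   req->cryptlen, req->iv);

        return enc ? crypto_skcipher_encrypt(subreq)
                   : crypto_skcipher_decrypt(subreq);
}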
/linux-5.19.10/fs/cachefiles/
io.c
392 static enum netfs_io_source cachefiles_prepare_read(struct netfs_io_subrequest *subreq, in cachefiles_prepare_read() argument
396 struct netfs_io_request *rreq = subreq->rreq; in cachefiles_prepare_read()
408 _enter("%zx @%llx/%llx", subreq->len, subreq->start, i_size); in cachefiles_prepare_read()
410 if (subreq->start >= i_size) { in cachefiles_prepare_read()
417 __set_bit(NETFS_SREQ_COPY_TO_CACHE, &subreq->flags); in cachefiles_prepare_read()
419 if (!test_bit(NETFS_SREQ_ONDEMAND, &subreq->flags)) in cachefiles_prepare_read()
440 off = vfs_llseek(file, subreq->start, SEEK_DATA); in cachefiles_prepare_read()
452 if (off >= subreq->start + subreq->len) { in cachefiles_prepare_read()
457 if (off > subreq->start) { in cachefiles_prepare_read()
459 subreq->len = off - subreq->start; in cachefiles_prepare_read()
[all …]
/linux-5.19.10/arch/arm/crypto/
aes-ce-glue.c
274 struct skcipher_request subreq; in cts_cbc_encrypt() local
278 skcipher_request_set_tfm(&subreq, tfm); in cts_cbc_encrypt()
279 skcipher_request_set_callback(&subreq, skcipher_request_flags(req), in cts_cbc_encrypt()
289 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
293 err = skcipher_walk_virt(&walk, &subreq, false) ?: in cts_cbc_encrypt()
294 cbc_encrypt_walk(&subreq, &walk); in cts_cbc_encrypt()
301 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
304 subreq.cryptlen); in cts_cbc_encrypt()
308 skcipher_request_set_crypt(&subreq, src, dst, in cts_cbc_encrypt()
312 err = skcipher_walk_virt(&walk, &subreq, false); in cts_cbc_encrypt()
[all …]
/linux-5.19.10/fs/afs/
file.c
241 struct netfs_io_subrequest *subreq = req->subreq; in afs_fetch_data_notify() local
248 if (subreq) { in afs_fetch_data_notify()
249 __set_bit(NETFS_SREQ_CLEAR_TAIL, &subreq->flags); in afs_fetch_data_notify()
250 netfs_subreq_terminated(subreq, error ?: req->actual_len, false); in afs_fetch_data_notify()
251 req->subreq = NULL; in afs_fetch_data_notify()
299 if (req->subreq) in afs_fetch_data()
300 netfs_subreq_terminated(req->subreq, PTR_ERR(op), false); in afs_fetch_data()
311 static void afs_issue_read(struct netfs_io_subrequest *subreq) in afs_issue_read() argument
313 struct afs_vnode *vnode = AFS_FS_I(subreq->rreq->inode); in afs_issue_read()
318 return netfs_subreq_terminated(subreq, -ENOMEM, false); in afs_issue_read()
[all …]
/linux-5.19.10/arch/arm64/crypto/
aes-glue.c
279 struct skcipher_request subreq; in cts_cbc_encrypt() local
282 skcipher_request_set_tfm(&subreq, tfm); in cts_cbc_encrypt()
283 skcipher_request_set_callback(&subreq, skcipher_request_flags(req), in cts_cbc_encrypt()
293 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
297 err = skcipher_walk_virt(&walk, &subreq, false) ?: in cts_cbc_encrypt()
298 cbc_encrypt_walk(&subreq, &walk); in cts_cbc_encrypt()
305 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
308 subreq.cryptlen); in cts_cbc_encrypt()
312 skcipher_request_set_crypt(&subreq, src, dst, in cts_cbc_encrypt()
316 err = skcipher_walk_virt(&walk, &subreq, false); in cts_cbc_encrypt()
[all …]
/linux-5.19.10/drivers/crypto/vmx/
aes_cbc.c
80 struct skcipher_request *subreq = skcipher_request_ctx(req); in p8_aes_cbc_crypt() local
82 *subreq = *req; in p8_aes_cbc_crypt()
83 skcipher_request_set_tfm(subreq, ctx->fallback); in p8_aes_cbc_crypt()
84 return enc ? crypto_skcipher_encrypt(subreq) : in p8_aes_cbc_crypt()
85 crypto_skcipher_decrypt(subreq); in p8_aes_cbc_crypt()
aes_xts.c
91 struct skcipher_request *subreq = skcipher_request_ctx(req); in p8_aes_xts_crypt() local
93 *subreq = *req; in p8_aes_xts_crypt()
94 skcipher_request_set_tfm(subreq, ctx->fallback); in p8_aes_xts_crypt()
95 return enc ? crypto_skcipher_encrypt(subreq) : in p8_aes_xts_crypt()
96 crypto_skcipher_decrypt(subreq); in p8_aes_xts_crypt()
aes_ctr.c
99 struct skcipher_request *subreq = skcipher_request_ctx(req); in p8_aes_ctr_crypt() local
101 *subreq = *req; in p8_aes_ctr_crypt()
102 skcipher_request_set_tfm(subreq, ctx->fallback); in p8_aes_ctr_crypt()
103 return crypto_skcipher_encrypt(subreq); in p8_aes_ctr_crypt()
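
The vmx and simd hits use a shorter variant of the same idea: the whole request is copied into the context-embedded subrequest and only the tfm is swapped for the fallback, so callback and I/O parameters come along for free. A sketch, assuming ctx_fallback was allocated at init time and the request-context size accounts for it:

static int fallback_crypt(struct skcipher_request *req,
                          struct crypto_skcipher *ctx_fallback, bool enc)
{
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;         /* inherit src/dst/cryptlen/iv/callback */
        skcipher_request_set_tfm(subreq, ctx_fallback);

        return enc ? crypto_skcipher_encrypt(subreq)
                   : crypto_skcipher_decrypt(subreq);
}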
/linux-5.19.10/fs/9p/
vfs_addr.c
34 static void v9fs_issue_read(struct netfs_io_subrequest *subreq) in v9fs_issue_read() argument
36 struct netfs_io_request *rreq = subreq->rreq; in v9fs_issue_read()
39 loff_t pos = subreq->start + subreq->transferred; in v9fs_issue_read()
40 size_t len = subreq->len - subreq->transferred; in v9fs_issue_read()
49 __set_bit(NETFS_SREQ_CLEAR_TAIL, &subreq->flags); in v9fs_issue_read()
51 netfs_subreq_terminated(subreq, err ?: total, false); in v9fs_issue_read()
/linux-5.19.10/fs/nfs/
write.c
454 struct nfs_page *subreq = destroy_list; in nfs_destroy_unlinked_subrequests() local
456 destroy_list = (subreq->wb_this_page == old_head) ? in nfs_destroy_unlinked_subrequests()
457 NULL : subreq->wb_this_page; in nfs_destroy_unlinked_subrequests()
460 nfs_page_set_headlock(subreq); in nfs_destroy_unlinked_subrequests()
461 WARN_ON_ONCE(old_head != subreq->wb_head); in nfs_destroy_unlinked_subrequests()
464 subreq->wb_this_page = subreq; in nfs_destroy_unlinked_subrequests()
465 subreq->wb_head = subreq; in nfs_destroy_unlinked_subrequests()
467 clear_bit(PG_REMOVE, &subreq->wb_flags); in nfs_destroy_unlinked_subrequests()
470 if (!kref_read(&subreq->wb_kref)) { in nfs_destroy_unlinked_subrequests()
472 if (test_and_clear_bit(PG_TEARDOWN, &subreq->wb_flags)) { in nfs_destroy_unlinked_subrequests()
[all …]
pagelist.c
201 nfs_page_group_lock_subreq(struct nfs_page *head, struct nfs_page *subreq) in nfs_page_group_lock_subreq() argument
205 if (!kref_get_unless_zero(&subreq->wb_kref)) in nfs_page_group_lock_subreq()
207 while (!nfs_lock_request(subreq)) { in nfs_page_group_lock_subreq()
209 ret = nfs_wait_on_request(subreq); in nfs_page_group_lock_subreq()
213 nfs_unroll_locks(head, subreq); in nfs_page_group_lock_subreq()
214 nfs_release_request(subreq); in nfs_page_group_lock_subreq()
230 struct nfs_page *subreq; in nfs_page_group_lock_subrequests() local
237 for (subreq = head->wb_this_page; subreq != head; in nfs_page_group_lock_subrequests()
238 subreq = subreq->wb_this_page) { in nfs_page_group_lock_subrequests()
239 ret = nfs_page_group_lock_subreq(head, subreq); in nfs_page_group_lock_subrequests()
[all …]
/linux-5.19.10/arch/x86/crypto/
aesni-intel_glue.c
373 struct skcipher_request subreq; in cts_cbc_encrypt() local
377 skcipher_request_set_tfm(&subreq, tfm); in cts_cbc_encrypt()
378 skcipher_request_set_callback(&subreq, skcipher_request_flags(req), in cts_cbc_encrypt()
388 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
392 err = cbc_encrypt(&subreq); in cts_cbc_encrypt()
399 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
402 subreq.cryptlen); in cts_cbc_encrypt()
406 skcipher_request_set_crypt(&subreq, src, dst, in cts_cbc_encrypt()
410 err = skcipher_walk_virt(&walk, &subreq, false); in cts_cbc_encrypt()
429 struct skcipher_request subreq; in cts_cbc_decrypt() local
[all …]
/linux-5.19.10/drivers/crypto/amcc/
crypto4xx_alg.c
269 SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher); in crypto4xx_ctr_crypt()
272 skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher); in crypto4xx_ctr_crypt()
273 skcipher_request_set_callback(subreq, req->base.flags, in crypto4xx_ctr_crypt()
275 skcipher_request_set_crypt(subreq, req->src, req->dst, in crypto4xx_ctr_crypt()
277 ret = encrypt ? crypto_skcipher_encrypt(subreq) in crypto4xx_ctr_crypt()
278 : crypto_skcipher_decrypt(subreq); in crypto4xx_ctr_crypt()
279 skcipher_request_zero(subreq); in crypto4xx_ctr_crypt()
354 struct aead_request *subreq = aead_request_ctx(req); in crypto4xx_aead_fallback() local
356 aead_request_set_tfm(subreq, ctx->sw_cipher.aead); in crypto4xx_aead_fallback()
357 aead_request_set_callback(subreq, req->base.flags, in crypto4xx_aead_fallback()
[all …]
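
crypto4xx takes yet another route for its CTR fallback: the subrequest lives on the stack via SYNC_SKCIPHER_REQUEST_ON_STACK, which only works with a crypto_sync_skcipher (no async completion). A hedged sketch of that shape; fallback stands in for ctx->sw_cipher.cipher:

static int ctr_sw_fallback(struct skcipher_request *req,
                           struct crypto_sync_skcipher *fallback, bool enc)
{
        int ret;
        SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, fallback);

        skcipher_request_set_sync_tfm(subreq, fallback);
        skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
        skcipher_request_set_crypt(subreq, req->src, req->dst,
                                   req->cryptlen, req->iv);
        ret = enc ? crypto_skcipher_encrypt(subreq)
                  : crypto_skcipher_decrypt(subreq);

        /* Wipe key material and IV copies off the stack. */
        skcipher_request_zero(subreq);
        return ret;
}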
/linux-5.19.10/include/linux/
netfs.h
213 bool (*clamp_length)(struct netfs_io_subrequest *subreq);
214 void (*issue_read)(struct netfs_io_subrequest *subreq);
260 enum netfs_io_source (*prepare_read)(struct netfs_io_subrequest *subreq,
287 extern void netfs_get_subrequest(struct netfs_io_subrequest *subreq,
289 extern void netfs_put_subrequest(struct netfs_io_subrequest *subreq,
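
The ops declared in netfs.h are what the afs and 9p hits implement. A minimal issue_read hook under the 5.19 netfs API looks roughly like the sketch below; my_transport_read() is a placeholder for the filesystem's own wire call, not a real helper:

static void my_issue_read(struct netfs_io_subrequest *subreq)
{
        struct netfs_io_request *rreq = subreq->rreq;
        struct iov_iter to;
        loff_t pos = subreq->start + subreq->transferred;
        size_t len = subreq->len - subreq->transferred;
        ssize_t ret;

        /* Map the pagecache pages backing this subrequest. */
        iov_iter_xarray(&to, READ, &rreq->mapping->i_pages, pos, len);

        ret = my_transport_read(rreq->inode, pos, &to);
        if (ret >= 0)
                /* Anything the server did not return can be zero-filled. */
                __set_bit(NETFS_SREQ_CLEAR_TAIL, &subreq->flags);

        /* Report bytes transferred or a negative error. */
        netfs_subreq_terminated(subreq, ret, false);
}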
