/linux-6.6.21/crypto/

skcipher.c
    46  static int skcipher_walk_next(struct skcipher_walk *walk);
    48  static inline void skcipher_map_src(struct skcipher_walk *walk)
    50          walk->src.virt.addr = scatterwalk_map(&walk->in);
    53  static inline void skcipher_map_dst(struct skcipher_walk *walk)
    55          walk->dst.virt.addr = scatterwalk_map(&walk->out);
    58  static inline void skcipher_unmap_src(struct skcipher_walk *walk)
    60          scatterwalk_unmap(walk->src.virt.addr);
    63  static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
    65          scatterwalk_unmap(walk->dst.virt.addr);
    68  static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
    [all …]

cfb.c
    43  static void crypto_cfb_final(struct skcipher_walk *walk,
    49          u8 *src = walk->src.virt.addr;
    50          u8 *dst = walk->dst.virt.addr;
    51          u8 *iv = walk->iv;
    52          unsigned int nbytes = walk->nbytes;
    58  static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
    62          unsigned int nbytes = walk->nbytes;
    63          u8 *src = walk->src.virt.addr;
    64          u8 *dst = walk->dst.virt.addr;
    65          u8 *iv = walk->iv;
    [all …]

cbc.c
    17  static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
    22          unsigned int nbytes = walk->nbytes;
    23          u8 *src = walk->src.virt.addr;
    24          u8 *dst = walk->dst.virt.addr;
    27          u8 *iv = walk->iv;
    45  static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
    50          unsigned int nbytes = walk->nbytes;
    51          u8 *src = walk->src.virt.addr;
    54          u8 *iv = walk->iv;
    68          memcpy(walk->iv, iv, bsize);
    [all …]

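Both helpers implement the same CBC-encrypt chaining, once for separate source/destination buffers and once in place. As a minimal, hedged sketch of that chaining over one mapped walk chunk (crypto_xor_cpy() and the exact loop shape are assumptions consistent with these hits, not a verbatim copy of cbc.c):

    #include <linux/string.h>               /* memcpy() */
    #include <crypto/algapi.h>              /* crypto_xor_cpy() */
    #include <crypto/internal/cipher.h>     /* crypto_cipher_encrypt_one() */
    #include <crypto/internal/skcipher.h>   /* struct skcipher_walk */

    /* Illustrative CBC-encrypt segment loop; the caller guarantees at least
     * one full block in this chunk, as skcipher_walk does. */
    static unsigned int cbc_encrypt_segment_sketch(struct skcipher_walk *walk,
                                                   struct crypto_cipher *cipher)
    {
            const unsigned int bsize = crypto_cipher_blocksize(cipher);
            unsigned int nbytes = walk->nbytes;
            u8 *src = walk->src.virt.addr;
            u8 *dst = walk->dst.virt.addr;
            u8 *iv = walk->iv;

            do {
                    crypto_xor_cpy(dst, src, iv, bsize);         /* dst = P xor IV (or previous C) */
                    crypto_cipher_encrypt_one(cipher, dst, dst); /* dst = E_k(dst) */
                    iv = dst;                                    /* next block chains off this C */

                    src += bsize;
                    dst += bsize;
            } while ((nbytes -= bsize) >= bsize);

            memcpy(walk->iv, iv, bsize);   /* carry the chaining value to the next chunk */
            return nbytes;                 /* leftover (< bsize) for skcipher_walk_done() */
    }
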
pcbc.c
    21          struct skcipher_walk *walk,   (in crypto_pcbc_encrypt_segment())
    25          unsigned int nbytes = walk->nbytes;
    26          u8 *src = walk->src.virt.addr;
    27          u8 *dst = walk->dst.virt.addr;
    28          u8 * const iv = walk->iv;
    43          struct skcipher_walk *walk,   (in crypto_pcbc_encrypt_inplace())
    47          unsigned int nbytes = walk->nbytes;
    48          u8 *src = walk->src.virt.addr;
    49          u8 * const iv = walk->iv;
    68          struct skcipher_walk walk;    (in crypto_pcbc_encrypt())
    [all …]

xctr.c
    34  static void crypto_xctr_crypt_final(struct skcipher_walk *walk,
    38          const u8 *src = walk->src.virt.addr;
    39          u8 *dst = walk->dst.virt.addr;
    40          unsigned int nbytes = walk->nbytes;
    43          crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
    44          crypto_cipher_encrypt_one(tfm, keystream, walk->iv);
    46          crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
    49  static int crypto_xctr_crypt_segment(struct skcipher_walk *walk,
    54          const u8 *src = walk->src.virt.addr;
    55          u8 *dst = walk->dst.virt.addr;
    [all …]

ahash.c
    34  static int hash_walk_next(struct crypto_hash_walk *walk)
    36          unsigned int alignmask = walk->alignmask;
    37          unsigned int offset = walk->offset;
    38          unsigned int nbytes = min(walk->entrylen,
    41          walk->data = kmap_local_page(walk->pg);
    42          walk->data += offset;
    51          walk->entrylen -= nbytes;
    55  static int hash_walk_new_entry(struct crypto_hash_walk *walk)
    59          sg = walk->sg;
    60          walk->offset = sg->offset;
    [all …]

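crypto_hash_walk is the hashing counterpart of skcipher_walk: it kmaps one scatterlist chunk at a time for the digest code. A hedged sketch of the consumer loop, assuming the internal crypto_hash_walk_first()/crypto_hash_walk_done() API from include/crypto/internal/hash.h; update_fn() is a placeholder for the driver's block-update routine:

    #include <crypto/internal/hash.h>   /* crypto_hash_walk_first/done (internal API) */

    /* Illustrative only: feed each mapped chunk of the request to update_fn(),
     * which is expected to return 0 on success or a negative errno. */
    static int hash_update_sketch(struct ahash_request *req,
                                  int (*update_fn)(const u8 *data, unsigned int len))
    {
            struct crypto_hash_walk walk;
            int nbytes;

            for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
                 nbytes = crypto_hash_walk_done(&walk, nbytes))
                    nbytes = update_fn(walk.data, nbytes);

            return nbytes;   /* 0 on success, negative errno on failure */
    }
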
ofb.c
    23          struct skcipher_walk walk;   (in crypto_ofb_crypt())
    26          err = skcipher_walk_virt(&walk, req, false);
    28          while (walk.nbytes >= bsize) {
    29                  const u8 *src = walk.src.virt.addr;
    30                  u8 *dst = walk.dst.virt.addr;
    31                  u8 * const iv = walk.iv;
    32                  unsigned int nbytes = walk.nbytes;
    41                  err = skcipher_walk_done(&walk, nbytes);
    44          if (walk.nbytes) {
    45                  crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
    [all …]

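crypto_ofb_crypt() shows the full walk lifecycle for a stream-like mode, including the trailing partial block. A minimal sketch assembled from these hits; the crypto_xor_cpy() calls stand in for the elided XOR lines and are an assumption:

    #include <crypto/algapi.h>              /* crypto_xor_cpy() */
    #include <crypto/internal/cipher.h>     /* crypto_cipher_encrypt_one() */
    #include <crypto/internal/skcipher.h>   /* skcipher_walk_* */

    /* Hedged reconstruction of an OFB-style crypt routine, illustrative only. */
    static int ofb_crypt_sketch(struct skcipher_request *req,
                                struct crypto_cipher *cipher)
    {
            const unsigned int bsize = crypto_cipher_blocksize(cipher);
            struct skcipher_walk walk;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while (walk.nbytes >= bsize) {
                    const u8 *src = walk.src.virt.addr;
                    u8 *dst = walk.dst.virt.addr;
                    u8 * const iv = walk.iv;
                    unsigned int nbytes = walk.nbytes;

                    do {
                            /* keystream block: iv = E_k(iv), then XOR into dst */
                            crypto_cipher_encrypt_one(cipher, iv, iv);
                            crypto_xor_cpy(dst, src, iv, bsize);
                            dst += bsize;
                            src += bsize;
                    } while ((nbytes -= bsize) >= bsize);

                    err = skcipher_walk_done(&walk, nbytes);
            }

            /* trailing partial block: one more keystream block, XOR only the tail */
            if (walk.nbytes) {
                    crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
                    crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
                                   walk.iv, walk.nbytes);
                    err = skcipher_walk_done(&walk, 0);
            }
            return err;
    }
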
/linux-6.6.21/include/crypto/

scatterwalk.h
    29  static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
    31          unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
    32          unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
    36  static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
    39          unsigned int len_this_page = scatterwalk_pagelen(walk);
    43  static inline void scatterwalk_advance(struct scatter_walk *walk,
    46          walk->offset += nbytes;
    49  static inline struct page *scatterwalk_page(struct scatter_walk *walk)
    51          return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
    59  static inline void scatterwalk_start(struct scatter_walk *walk,
    [all …]

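These helpers are the low-level scatterlist cursor used by the CCM/GCM glue below to hash associated data. A sketch of that loop built from the ccm_calculate_auth_mac()/gcm_calculate_auth_mac() hits; process() is a placeholder for the MAC update:

    #include <linux/scatterlist.h>      /* sg_next() */
    #include <crypto/scatterwalk.h>     /* scatterwalk_* helpers */

    /* Walk assoclen bytes of src, mapping at most one page-contiguous run
     * at a time, exactly as the AEAD glue files below do. Illustrative only. */
    static void walk_assoc_data_sketch(struct scatterlist *src, unsigned int assoclen,
                                       void (*process)(const u8 *p, unsigned int n))
    {
            struct scatter_walk walk;

            scatterwalk_start(&walk, src);

            while (assoclen) {
                    unsigned int n = scatterwalk_clamp(&walk, assoclen);
                    u8 *p;

                    if (!n) {
                            /* current sg entry exhausted: move to the next one */
                            scatterwalk_start(&walk, sg_next(walk.sg));
                            n = scatterwalk_clamp(&walk, assoclen);
                    }

                    p = scatterwalk_map(&walk);
                    process(p, n);
                    scatterwalk_unmap(p);

                    assoclen -= n;
                    scatterwalk_advance(&walk, n);
                    scatterwalk_done(&walk, 0, assoclen);
            }
    }
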
/linux-6.6.21/mm/

pagewalk.c
    24          unsigned long end, struct mm_walk *walk)   (in walk_pte_range_inner())
    26          const struct mm_walk_ops *ops = walk->ops;
    30          err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk);
    42          struct mm_walk *walk)                      (in walk_pte_range())
    48          if (walk->no_vma) {
    55          if (walk->mm == &init_mm || addr >= TASK_SIZE)
    60          err = walk_pte_range_inner(pte, addr, end, walk);
    61          if (walk->mm != &init_mm && addr < TASK_SIZE)
    65          pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
    67          err = walk_pte_range_inner(pte, addr, end, walk);
    [all …]

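pagewalk.c is the generic driver that invokes the mm_walk_ops callbacks used by ptdump.c and mapping_dirty_helpers.c below. A hedged sketch of a minimal client; walk_page_range() and the pte_entry signature are what I'm assuming from linux/pagewalk.h, and the counting callback itself is hypothetical:

    #include <linux/mm.h>         /* mmap_read_lock(), pte helpers */
    #include <linux/pagewalk.h>   /* struct mm_walk, mm_walk_ops, walk_page_range() */

    /* Hypothetical client: count present PTEs in a range. walk->private carries
     * the caller's cookie, as ptdump.c and mapping_dirty_helpers.c do below. */
    static int count_present_pte(pte_t *pte, unsigned long addr,
                                 unsigned long next, struct mm_walk *walk)
    {
            unsigned long *count = walk->private;

            if (pte_present(ptep_get(pte)))
                    (*count)++;

            return 0;   /* a non-zero return would abort the walk */
    }

    static const struct mm_walk_ops count_ops = {
            .pte_entry = count_present_pte,
    };

    static unsigned long count_range(struct mm_struct *mm,
                                     unsigned long start, unsigned long end)
    {
            unsigned long count = 0;

            mmap_read_lock(mm);   /* walk_page_range() expects mmap_lock held */
            walk_page_range(mm, start, end, &count_ops, &count);
            mmap_read_unlock(mm);

            return count;
    }
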
ptdump.c
    15  static inline int note_kasan_page_table(struct mm_walk *walk,
    18          struct ptdump_state *st = walk->private;
    22          walk->action = ACTION_CONTINUE;
    29          unsigned long next, struct mm_walk *walk)   (in ptdump_pgd_entry())
    31          struct ptdump_state *st = walk->private;
    37          return note_kasan_page_table(walk, addr);
    45          walk->action = ACTION_CONTINUE;
    52          unsigned long next, struct mm_walk *walk)   (in ptdump_p4d_entry())
    54          struct ptdump_state *st = walk->private;
    60          return note_kasan_page_table(walk, addr);
    [all …]

mapping_dirty_helpers.c
    35          struct mm_walk *walk)                      (in wp_pte())
    37          struct wp_walk *wpwalk = walk->private;
    41          pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte);
    44          ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent);
    90          unsigned long end, struct mm_walk *walk)   (in clean_record_pte())
    92          struct wp_walk *wpwalk = walk->private;
    97          pgoff_t pgoff = ((addr - walk->vma->vm_start) >> PAGE_SHIFT) +
    98                          walk->vma->vm_pgoff - cwalk->bitmap_pgoff;
    99          pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte);
   102          ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent);
    [all …]

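Both callbacks wrap the PTE update in the ptep_modify_prot_start()/ptep_modify_prot_commit() pair so the change stays consistent with concurrent hardware access/dirty bit updates. A minimal sketch of the write-protect case; pte_write(), pte_wrprotect() and ptep_get() are assumed helpers, and the wp_walk bookkeeping from the hits above is omitted:

    #include <linux/mm.h>
    #include <linux/pagewalk.h>

    /* Illustrative ->pte_entry callback that write-protects writable PTEs. */
    static int wp_pte_sketch(pte_t *pte, unsigned long addr,
                             unsigned long end, struct mm_walk *walk)
    {
            pte_t ptent = ptep_get(pte);

            if (pte_write(ptent)) {
                    /* start/commit bracket the read-modify-write of the PTE */
                    pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte);

                    ptent = pte_wrprotect(old_pte);
                    ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent);
            }

            return 0;
    }
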
hugetlb_vmemmap.c
    32          struct vmemmap_remap_walk *walk);
    93          struct vmemmap_remap_walk *walk)   (in vmemmap_pte_range())
   101          if (!walk->reuse_page) {
   102                  walk->reuse_page = pte_page(ptep_get(pte));
   109          walk->nr_walked++;
   113          walk->remap_pte(pte, addr, walk);
   114          walk->nr_walked++;
   120          struct vmemmap_remap_walk *walk)   (in vmemmap_pmd_range())
   134          vmemmap_pte_range(pmd, addr, next, walk);
   142          struct vmemmap_remap_walk *walk)   (in vmemmap_pud_range())
    [all …]

/linux-6.6.21/arch/arm64/crypto/

sm4-neon-glue.c
    40          struct skcipher_walk walk;   (in sm4_ecb_do_crypt())
    44          err = skcipher_walk_virt(&walk, req, false);
    46          while ((nbytes = walk.nbytes) > 0) {
    47                  const u8 *src = walk.src.virt.addr;
    48                  u8 *dst = walk.dst.virt.addr;
    60                  err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
    86          struct skcipher_walk walk;   (in sm4_cbc_encrypt())
    90          err = skcipher_walk_virt(&walk, req, false);
    92          while ((nbytes = walk.nbytes) > 0) {
    93                  const u8 *iv = walk.iv;
    [all …]

aes-glue.c
   184          struct skcipher_walk walk;   (in ecb_encrypt())
   187          err = skcipher_walk_virt(&walk, req, false);
   189          while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   191                  aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
   194                  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
   204          struct skcipher_walk walk;   (in ecb_decrypt())
   207          err = skcipher_walk_virt(&walk, req, false);
   209          while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   211                  aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
   214                  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
    [all …]

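In the arm64 glue the elided lines around aes_ecb_encrypt()/aes_ecb_decrypt() are, on my assumption, kernel_neon_begin()/kernel_neon_end() brackets, so the FP/SIMD unit is only claimed while the assembly runs and the walk itself can sleep or allocate in between chunks. A sketch under that assumption; the asm prototype and the key/rounds arguments are illustrative, not copied from aes-glue.c:

    #include <asm/neon.h>                   /* kernel_neon_begin/end (arm64) */
    #include <crypto/aes.h>                 /* AES_BLOCK_SIZE */
    #include <crypto/internal/skcipher.h>   /* skcipher_walk_* */

    /* Assumed prototype of the assembly helper declared in aes-glue.c. */
    asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
                                    int rounds, int blocks);

    /* Assumed shape of the per-chunk loop; key_enc/rounds are hypothetical args. */
    static int ecb_encrypt_sketch(struct skcipher_request *req,
                                  const u32 *key_enc, int rounds)
    {
            struct skcipher_walk walk;
            unsigned int blocks;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                    kernel_neon_begin();   /* claim FP/SIMD only around the asm call */
                    aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                    key_enc, rounds, blocks);
                    kernel_neon_end();     /* release before the walk may sleep */
                    err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
            }
            return err;
    }
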
aes-neonbs-glue.c
   100          struct skcipher_walk walk;   (in __ecb_crypt())
   103          err = skcipher_walk_virt(&walk, req, false);
   105          while (walk.nbytes >= AES_BLOCK_SIZE) {
   106                  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
   108                  if (walk.nbytes < walk.total)
   110                          walk.stride / AES_BLOCK_SIZE);
   113                  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
   116                  err = skcipher_walk_done(&walk,
   117                                           walk.nbytes - blocks * AES_BLOCK_SIZE);
   160          struct skcipher_walk walk;   (in cbc_encrypt())
    [all …]

sm4-ce-glue.c
   116          struct skcipher_walk walk;   (in sm4_ecb_do_crypt())
   120          err = skcipher_walk_virt(&walk, req, false);
   122          while ((nbytes = walk.nbytes) > 0) {
   123                  const u8 *src = walk.src.virt.addr;
   124                  u8 *dst = walk.dst.virt.addr;
   137                  err = skcipher_walk_done(&walk, nbytes);
   162          struct skcipher_walk walk;   (in sm4_cbc_crypt())
   166          err = skcipher_walk_virt(&walk, req, false);
   170          while ((nbytes = walk.nbytes) > 0) {
   171                  const u8 *src = walk.src.virt.addr;
    [all …]

sm4-ce-ccm-glue.c
    97          struct scatter_walk walk;   (in ccm_calculate_auth_mac())
   112          scatterwalk_start(&walk, req->src);
   115          u32 n = scatterwalk_clamp(&walk, assoclen);
   119                  scatterwalk_start(&walk, sg_next(walk.sg));
   120                  n = scatterwalk_clamp(&walk, assoclen);
   123          p = ptr = scatterwalk_map(&walk);
   125          scatterwalk_advance(&walk, n);
   158          scatterwalk_done(&walk, 0, assoclen);
   162  static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
   172          memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
    [all …]

aes-ce-ccm-glue.c
   102          struct scatter_walk walk;   (in ccm_calculate_auth_mac())
   118          scatterwalk_start(&walk, req->src);
   121          u32 n = scatterwalk_clamp(&walk, len);
   125                  scatterwalk_start(&walk, sg_next(walk.sg));
   126                  n = scatterwalk_clamp(&walk, len);
   129          p = scatterwalk_map(&walk);
   141          scatterwalk_advance(&walk, n);
   142          scatterwalk_done(&walk, 0, len);
   150          struct skcipher_walk walk;   (in ccm_encrypt())
   163          err = skcipher_walk_aead_encrypt(&walk, req, false);
    [all …]

sm4-ce-gcm-glue.c
    79          struct scatter_walk walk;   (in gcm_calculate_auth_mac())
    82          scatterwalk_start(&walk, req->src);
    85          u32 n = scatterwalk_clamp(&walk, assoclen);
    89                  scatterwalk_start(&walk, sg_next(walk.sg));
    90                  n = scatterwalk_clamp(&walk, assoclen);
    93          p = ptr = scatterwalk_map(&walk);
    95          scatterwalk_advance(&walk, n);
   127          scatterwalk_done(&walk, 0, assoclen);
   137  static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
   152          lengths.b = cpu_to_be64(walk->total * 8);
    [all …]

/linux-6.6.21/arch/arm/crypto/

aes-ce-glue.c
   171          struct skcipher_walk walk;   (in ecb_encrypt())
   175          err = skcipher_walk_virt(&walk, req, false);
   177          while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   179                  ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
   182                  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
   191          struct skcipher_walk walk;   (in ecb_decrypt())
   195          err = skcipher_walk_virt(&walk, req, false);
   197          while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
   199                  ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
   202                  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
    [all …]

aes-neonbs-glue.c
    94          struct skcipher_walk walk;   (in __ecb_crypt())
    97          err = skcipher_walk_virt(&walk, req, false);
    99          while (walk.nbytes >= AES_BLOCK_SIZE) {
   100                  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
   102                  if (walk.nbytes < walk.total)
   104                          walk.stride / AES_BLOCK_SIZE);
   107                  fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
   110                  err = skcipher_walk_done(&walk,
   111                                           walk.nbytes - blocks * AES_BLOCK_SIZE);
   168          struct skcipher_walk walk;   (in cbc_decrypt())
    [all …]

/linux-6.6.21/arch/x86/crypto/

sm4_aesni_avx_glue.c
    43          struct skcipher_walk walk;   (in ecb_do_crypt())
    47          err = skcipher_walk_virt(&walk, req, false);
    49          while ((nbytes = walk.nbytes) > 0) {
    50                  const u8 *src = walk.src.virt.addr;
    51                  u8 *dst = walk.dst.virt.addr;
    69                  err = skcipher_walk_done(&walk, nbytes);
    97          struct skcipher_walk walk;   (in sm4_cbc_encrypt())
   101          err = skcipher_walk_virt(&walk, req, false);
   103          while ((nbytes = walk.nbytes) > 0) {
   104                  const u8 *iv = walk.iv;
    [all …]

aesni-intel_glue.c
   295          struct skcipher_walk walk;   (in ecb_encrypt())
   299          err = skcipher_walk_virt(&walk, req, false);
   301          while ((nbytes = walk.nbytes)) {
   303                  aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
   307                  err = skcipher_walk_done(&walk, nbytes);
   317          struct skcipher_walk walk;   (in ecb_decrypt())
   321          err = skcipher_walk_virt(&walk, req, false);
   323          while ((nbytes = walk.nbytes)) {
   325                  aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
   329                  err = skcipher_walk_done(&walk, nbytes);
    [all …]

des3_ede_glue.c
    69          struct skcipher_walk walk;   (in ecb_crypt())
    73          err = skcipher_walk_virt(&walk, req, false);
    75          while ((nbytes = walk.nbytes)) {
    76                  u8 *wsrc = walk.src.virt.addr;
    77                  u8 *wdst = walk.dst.virt.addr;
   104                  err = skcipher_walk_done(&walk, nbytes);
   127          struct skcipher_walk *walk)   (in __cbc_encrypt())
   130          unsigned int nbytes = walk->nbytes;
   131          u64 *src = (u64 *)walk->src.virt.addr;
   132          u64 *dst = (u64 *)walk->dst.virt.addr;
    [all …]

/linux-6.6.21/arch/sparc/crypto/

aes_glue.c
   223          struct skcipher_walk walk;   (in ecb_encrypt())
   227          err = skcipher_walk_virt(&walk, req, true);
   232          while ((nbytes = walk.nbytes) != 0) {
   233                  ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
   234                                        walk.dst.virt.addr,
   236                  err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
   247          struct skcipher_walk walk;   (in ecb_decrypt())
   251          err = skcipher_walk_virt(&walk, req, true);
   257          while ((nbytes = walk.nbytes) != 0) {
   258                  ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
    [all …]