Lines Matching refs:walk
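These are cross-reference hits for the identifier walk, one line per use, each prefixed with its source line number and suffixed with the enclosing function. The ce_aes_* callees and the skcipher_walk API point at the Linux kernel's AES glue code for the ARM Crypto Extensions (most likely arch/arm/crypto/aes-ce-glue.c, though the file name is not part of this listing). To make the fragments readable, a hedged reconstruction of each function is interleaved after its group of hits below; these follow the usual structure of this glue code and are sketches, not verbatim source.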
171 struct skcipher_walk walk; in ecb_encrypt() local
175 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
177 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
179 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
182 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
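Joining the hits for ecb_encrypt() (source lines 171-182), the function plausibly looks like the sketch below. Only the walk-related lines are confirmed by the listing; the ctx lookup, the num_rounds() helper, the asm prototype, and the kernel_neon_begin()/kernel_neon_end() bracketing are assumptions based on the standard pattern for this kind of glue code.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <asm/neon.h>

/* asm routine; prototype assumed from the call sites in the hits */
asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);

/* helper assumed from this file: AES-128/192/256 -> 10/12/14 rounds */
static int num_rounds(struct crypto_aes_ctx *ctx)
{
	return 6 + ctx->key_length / 4;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;                  /* line 171 */
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);    /* line 175 */

	/* process as many full blocks as the current chunk holds */
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();            /* assumed NEON bracketing */
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks);
		kernel_neon_end();
		/* hand the sub-block remainder back to the walk */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}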
191 struct skcipher_walk walk; in ecb_decrypt() local
195 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
197 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
199 ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
202 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
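ecb_decrypt() (lines 191-202) is the mirror image: the loop body calls ce_aes_ecb_decrypt() and, presumably, passes ctx->key_dec instead of ctx->key_enc; the walk handling is identical to the sketch above.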
208 struct skcipher_walk *walk) in cbc_encrypt_walk() argument
215 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
217 ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_encrypt_walk()
219 walk->iv); in cbc_encrypt_walk()
221 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
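cbc_encrypt_walk() (lines 208-221) takes the walk by pointer so it can be reused by the CTS path further down. A sketch, with the same includes and helpers as the ECB example; passing walk->iv to the asm routine lets the chaining value carry across chunks, on the assumption that the routine updates the IV buffer in place:

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}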
228 struct skcipher_walk walk; in cbc_encrypt() local
231 err = skcipher_walk_virt(&walk, req, false); in cbc_encrypt()
234 return cbc_encrypt_walk(req, &walk); in cbc_encrypt()
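The cbc_encrypt() entry point (lines 228-234) is a thin wrapper that starts the walk and delegates; the error check between the two hits is assumed:

static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);    /* line 231 */
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);            /* line 234 */
}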
238 struct skcipher_walk *walk) in cbc_decrypt_walk() argument
245 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
247 ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_decrypt_walk()
249 walk->iv); in cbc_decrypt_walk()
251 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
258 struct skcipher_walk walk; in cbc_decrypt() local
261 err = skcipher_walk_virt(&walk, req, false); in cbc_decrypt()
264 return cbc_decrypt_walk(req, &walk); in cbc_decrypt()
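cbc_decrypt_walk() (lines 238-251) and cbc_decrypt() (lines 258-264) mirror the two functions above, calling ce_aes_cbc_decrypt() with (presumably) ctx->key_dec; nothing about the walk usage changes.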
275 struct skcipher_walk walk; in cts_cbc_encrypt() local
293 err = skcipher_walk_virt(&walk, &subreq, false) ?: in cts_cbc_encrypt()
294 cbc_encrypt_walk(&subreq, &walk); in cts_cbc_encrypt()
312 err = skcipher_walk_virt(&walk, &subreq, false); in cts_cbc_encrypt()
317 ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in cts_cbc_encrypt()
318 ctx->key_enc, num_rounds(ctx), walk.nbytes, in cts_cbc_encrypt()
319 walk.iv); in cts_cbc_encrypt()
322 return skcipher_walk_done(&walk, 0); in cts_cbc_encrypt()
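cts_cbc_encrypt() (lines 275-322) implements CBC with ciphertext stealing: all but the last two blocks go through the plain CBC helper on a subrequest, then the final block pair (possibly including a partial block) is handled by a dedicated asm routine. The ?: chaining at lines 293-294, the second walk at line 312, and the skcipher_walk_done(&walk, 0) at line 322 are confirmed by the hits; the subrequest setup, the cryptlen edge cases, and the scatterwalk_ffwd() bookkeeping (which needs <crypto/scatterwalk.h>) are assumptions modeled on the standard kernel CTS pattern:

static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;                      /* line 275 */
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		/* plain CBC over everything except the final two blocks */
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);  /* lines 293-294 */
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src,
					     subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* ciphertext stealing over the last full block plus the tail */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);    /* line 312 */
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
			       walk.iv);                    /* lines 317-319 */
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);                /* line 322 */
}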
333 struct skcipher_walk walk; in cts_cbc_decrypt() local
351 err = skcipher_walk_virt(&walk, &subreq, false) ?: in cts_cbc_decrypt()
352 cbc_decrypt_walk(&subreq, &walk); in cts_cbc_decrypt()
370 err = skcipher_walk_virt(&walk, &subreq, false); in cts_cbc_decrypt()
375 ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in cts_cbc_decrypt()
376 ctx->key_dec, num_rounds(ctx), walk.nbytes, in cts_cbc_decrypt()
377 walk.iv); in cts_cbc_decrypt()
380 return skcipher_walk_done(&walk, 0); in cts_cbc_decrypt()
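cts_cbc_decrypt() (lines 333-380) follows the same shape, substituting cbc_decrypt_walk(), ce_aes_cbc_cts_decrypt(), and ctx->key_dec, as the hits at lines 351-352 and 375-377 show.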
387 struct skcipher_walk walk; in ctr_encrypt() local
390 err = skcipher_walk_virt(&walk, req, false); in ctr_encrypt()
392 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
394 ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ctr_encrypt()
396 walk.iv); in ctr_encrypt()
398 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ctr_encrypt()
400 if (walk.nbytes) { in ctr_encrypt()
402 unsigned int nbytes = walk.nbytes; in ctr_encrypt()
403 u8 *tdst = walk.dst.virt.addr; in ctr_encrypt()
404 u8 *tsrc = walk.src.virt.addr; in ctr_encrypt()
413 blocks, walk.iv); in ctr_encrypt()
416 err = skcipher_walk_done(&walk, 0); in ctr_encrypt()
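ctr_encrypt() (lines 387-416) runs the usual full-block loop and then, because CTR is a stream mode, finishes any sub-block tail: the hits at lines 400-404 show a tail branch that captures walk.nbytes and the raw src/dst pointers. In the sketch below, the convention of passing blocks == -1 to request a single keystream block into a stack buffer, then XOR-copying it over the tail with crypto_xor_cpy() (from <crypto/algapi.h>), is an assumption based on the common arm/arm64 glue pattern:

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;                      /* line 387 */
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, false);    /* line 390 */

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		/* final partial block: generate keystream, XOR the tail */
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;      /* line 402 */
		u8 *tdst = walk.dst.virt.addr;          /* line 403 */
		u8 *tsrc = walk.src.virt.addr;          /* line 404 */

		blocks = -1;    /* assumed flag: emit one keystream block */

		kernel_neon_begin();
		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
				   blocks, walk.iv);     /* line 413 */
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);

		err = skcipher_walk_done(&walk, 0);      /* line 416 */
	}
	return err;
}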
453 struct skcipher_walk walk; in xts_encrypt() local
458 err = skcipher_walk_virt(&walk, req, false); in xts_encrypt()
460 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_encrypt()
464 skcipher_walk_abort(&walk); in xts_encrypt()
474 err = skcipher_walk_virt(&walk, req, false); in xts_encrypt()
479 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_encrypt()
480 int nbytes = walk.nbytes; in xts_encrypt()
482 if (walk.nbytes < walk.total) in xts_encrypt()
486 ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in xts_encrypt()
487 ctx->key1.key_enc, rounds, nbytes, walk.iv, in xts_encrypt()
490 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_encrypt()
503 err = skcipher_walk_virt(&walk, req, false); in xts_encrypt()
508 ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in xts_encrypt()
509 ctx->key1.key_enc, rounds, walk.nbytes, walk.iv, in xts_encrypt()
513 return skcipher_walk_done(&walk, 0); in xts_encrypt()
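xts_encrypt() (lines 453-513) is the most involved user of the walk, and the hits show three phases. First, if the request has a sub-block tail and the initial walk chunk does not cover the whole request, the walk is aborted (line 464) and restarted over a subrequest trimmed to leave the last two blocks for ciphertext stealing. Second, a main loop rounds nbytes down to a block multiple mid-request (lines 479-490) and passes a first flag so the asm can compute the initial tweak. Third, a final walk covers the stolen-ciphertext tail (lines 503-513). In the sketch, the two-key XTS context, the ctx->key2.key_enc argument, the subrequest plumbing, and the scatterwalk_ffwd() calls are assumptions; the walk calls and the visible ce_aes_xts_encrypt() arguments come straight from the hits:

struct crypto_aes_xts_ctx {             /* assumed two-key XTS context */
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;                      /* line 453 */

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);    /* line 458 */

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		/* leave the last two blocks for ciphertext stealing */
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);             /* line 464 */

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);  /* line 474 */
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;               /* line 480 */

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* one last walk over the final block pair for ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);    /* line 503 */
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);    /* lines 508-509 */
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);            /* line 513 */
}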
525 struct skcipher_walk walk; in xts_decrypt() local
530 err = skcipher_walk_virt(&walk, req, false); in xts_decrypt()
532 if (unlikely(tail > 0 && walk.nbytes < walk.total)) { in xts_decrypt()
536 skcipher_walk_abort(&walk); in xts_decrypt()
546 err = skcipher_walk_virt(&walk, req, false); in xts_decrypt()
551 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_decrypt()
552 int nbytes = walk.nbytes; in xts_decrypt()
554 if (walk.nbytes < walk.total) in xts_decrypt()
558 ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in xts_decrypt()
559 ctx->key1.key_dec, rounds, nbytes, walk.iv, in xts_decrypt()
562 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_decrypt()
575 err = skcipher_walk_virt(&walk, req, false); in xts_decrypt()
580 ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in xts_decrypt()
581 ctx->key1.key_dec, rounds, walk.nbytes, walk.iv, in xts_decrypt()
585 return skcipher_walk_done(&walk, 0); in xts_decrypt()
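xts_decrypt() (lines 525-585) is structurally identical, as the hits confirm: it calls ce_aes_xts_decrypt() with ctx->key1.key_dec, but the abort-and-retry setup, the block-rounding loop, and the final ciphertext-stealing walk are unchanged. Only the direction of the cipher differs, not the walk logic.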