/linux-6.6.21/arch/x86/crypto/ |
D | sm4_aesni_avx_glue.c |
      44   unsigned int nbytes;    in ecb_do_crypt() local
      49   while ((nbytes = walk.nbytes) > 0) {    in ecb_do_crypt()
      54   while (nbytes >= SM4_CRYPT8_BLOCK_SIZE) {    in ecb_do_crypt()
      58   nbytes -= SM4_CRYPT8_BLOCK_SIZE;    in ecb_do_crypt()
      60   while (nbytes >= SM4_BLOCK_SIZE) {    in ecb_do_crypt()
      61   unsigned int nblocks = min(nbytes >> 4, 4u);    in ecb_do_crypt()
      65   nbytes -= nblocks * SM4_BLOCK_SIZE;    in ecb_do_crypt()
      69   err = skcipher_walk_done(&walk, nbytes);    in ecb_do_crypt()
      98   unsigned int nbytes;    in sm4_cbc_encrypt() local
      103  while ((nbytes = walk.nbytes) > 0) {    in sm4_cbc_encrypt()
      [all …]
|
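The hits above, and the des3_ede, aesni and aria entries that follow, all come from the same skcipher-walk idiom: fetch a virtually mapped chunk, process as many full blocks as possible inside a kernel_fpu_begin()/kernel_fpu_end() section, and hand the unprocessed tail back to skcipher_walk_done(). A condensed sketch of that loop, with a hypothetical sm4_crypt_blocks() standing in for the real assembly entry points:

    #include <crypto/internal/skcipher.h>
    #include <crypto/sm4.h>
    #include <linux/minmax.h>
    #include <asm/fpu/api.h>

    /* hypothetical helper standing in for the AES-NI/AVX assembly routines */
    void sm4_crypt_blocks(const u32 *rkey, u8 *dst, const u8 *src, int nblocks);

    static int ecb_do_crypt_sketch(struct skcipher_request *req, const u32 *rkey)
    {
            struct skcipher_walk walk;
            unsigned int nbytes;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while ((nbytes = walk.nbytes) > 0) {
                    const u8 *src = walk.src.virt.addr;
                    u8 *dst = walk.dst.virt.addr;

                    kernel_fpu_begin();
                    while (nbytes >= SM4_BLOCK_SIZE) {
                            /* min(nbytes >> 4, 4u): at most four 16-byte blocks per call */
                            unsigned int nblocks = min(nbytes >> 4, 4u);

                            sm4_crypt_blocks(rkey, dst, src, nblocks);
                            src += nblocks * SM4_BLOCK_SIZE;
                            dst += nblocks * SM4_BLOCK_SIZE;
                            nbytes -= nblocks * SM4_BLOCK_SIZE;
                    }
                    kernel_fpu_end();

                    /* whatever is left over (a partial block) is pushed back into the walk */
                    err = skcipher_walk_done(&walk, nbytes);
            }

            return err;
    }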
D | des3_ede_glue.c |
      70   unsigned int nbytes;    in ecb_crypt() local
      75   while ((nbytes = walk.nbytes)) {    in ecb_crypt()
      80   if (nbytes >= bsize * 3) {    in ecb_crypt()
      87   nbytes -= bsize * 3;    in ecb_crypt()
      88   } while (nbytes >= bsize * 3);    in ecb_crypt()
      90   if (nbytes < bsize)    in ecb_crypt()
      100  nbytes -= bsize;    in ecb_crypt()
      101  } while (nbytes >= bsize);    in ecb_crypt()
      104  err = skcipher_walk_done(&walk, nbytes);    in ecb_crypt()
      130  unsigned int nbytes = walk->nbytes;    in __cbc_encrypt() local
      [all …]
|
D | aesni-intel_glue.c |
      296  unsigned int nbytes;    in ecb_encrypt() local
      301  while ((nbytes = walk.nbytes)) {    in ecb_encrypt()
      304  nbytes & AES_BLOCK_MASK);    in ecb_encrypt()
      306  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_encrypt()
      307  err = skcipher_walk_done(&walk, nbytes);    in ecb_encrypt()
      318  unsigned int nbytes;    in ecb_decrypt() local
      323  while ((nbytes = walk.nbytes)) {    in ecb_decrypt()
      326  nbytes & AES_BLOCK_MASK);    in ecb_decrypt()
      328  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_decrypt()
      329  err = skcipher_walk_done(&walk, nbytes);    in ecb_decrypt()
      [all …]
|
D | aria_gfni_avx512_glue.c |
      81   unsigned int nbytes;    in aria_avx512_ctr_encrypt() local
      86   while ((nbytes = walk.nbytes) > 0) {    in aria_avx512_ctr_encrypt()
      90   while (nbytes >= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE) {    in aria_avx512_ctr_encrypt()
      98   nbytes -= ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE;    in aria_avx512_ctr_encrypt()
      101  while (nbytes >= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE) {    in aria_avx512_ctr_encrypt()
      109  nbytes -= ARIA_AESNI_AVX2_PARALLEL_BLOCK_SIZE;    in aria_avx512_ctr_encrypt()
      112  while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {    in aria_avx512_ctr_encrypt()
      120  nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;    in aria_avx512_ctr_encrypt()
      123  while (nbytes >= ARIA_BLOCK_SIZE) {    in aria_avx512_ctr_encrypt()
      135  nbytes -= ARIA_BLOCK_SIZE;    in aria_avx512_ctr_encrypt()
      [all …]
|
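The aria_avx512_ctr_encrypt() hits show a cascade of loops: each pass consumes the widest SIMD batch that still fits in nbytes (ARIA_GFNI_AVX512_PARALLEL_BLOCK_SIZE first, then the AVX2 and AES-NI batch sizes), and plain ARIA_BLOCK_SIZE blocks mop up the rest. A minimal sketch of that shape; the batch widths and the process_blocks() helper are illustrative placeholders, not the driver's real symbols:

    #include <linux/types.h>

    #define BLOCK_SIZE      16
    #define BATCH_WIDE      (64 * BLOCK_SIZE)       /* e.g. AVX-512 path */
    #define BATCH_MEDIUM    (32 * BLOCK_SIZE)       /* e.g. AVX2 path */
    #define BATCH_NARROW    (16 * BLOCK_SIZE)       /* e.g. AES-NI path */

    void process_blocks(u8 *dst, const u8 *src, unsigned int len); /* hypothetical */

    static void ctr_cascade(u8 *dst, const u8 *src, unsigned int nbytes)
    {
            /* widest batches first, so the fastest path eats most of the data */
            while (nbytes >= BATCH_WIDE) {
                    process_blocks(dst, src, BATCH_WIDE);
                    dst += BATCH_WIDE; src += BATCH_WIDE; nbytes -= BATCH_WIDE;
            }
            while (nbytes >= BATCH_MEDIUM) {
                    process_blocks(dst, src, BATCH_MEDIUM);
                    dst += BATCH_MEDIUM; src += BATCH_MEDIUM; nbytes -= BATCH_MEDIUM;
            }
            while (nbytes >= BATCH_NARROW) {
                    process_blocks(dst, src, BATCH_NARROW);
                    dst += BATCH_NARROW; src += BATCH_NARROW; nbytes -= BATCH_NARROW;
            }
            while (nbytes >= BLOCK_SIZE) {
                    process_blocks(dst, src, BLOCK_SIZE);
                    dst += BLOCK_SIZE; src += BLOCK_SIZE; nbytes -= BLOCK_SIZE;
            }
            /* nbytes now holds only the final partial block, if any */
    }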
/linux-6.6.21/arch/arm64/crypto/ |
D | sm4-neon-glue.c |
      41   unsigned int nbytes;    in sm4_ecb_do_crypt() local
      46   while ((nbytes = walk.nbytes) > 0) {    in sm4_ecb_do_crypt()
      51   nblocks = nbytes / SM4_BLOCK_SIZE;    in sm4_ecb_do_crypt()
      60   err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);    in sm4_ecb_do_crypt()
      87   unsigned int nbytes;    in sm4_cbc_encrypt() local
      92   while ((nbytes = walk.nbytes) > 0) {    in sm4_cbc_encrypt()
      97   while (nbytes >= SM4_BLOCK_SIZE) {    in sm4_cbc_encrypt()
      103  nbytes -= SM4_BLOCK_SIZE;    in sm4_cbc_encrypt()
      108  err = skcipher_walk_done(&walk, nbytes);    in sm4_cbc_encrypt()
      119  unsigned int nbytes;    in sm4_cbc_decrypt() local
      [all …]
|
D | sm4-ce-glue.c |
      25   #define BYTES2BLKS(nbytes) ((nbytes) >> 4)    argument
      37   u8 *iv, unsigned int nbytes);
      39   u8 *iv, unsigned int nbytes);
      47   u8 *tweak, unsigned int nbytes,
      50   u8 *tweak, unsigned int nbytes,
      117  unsigned int nbytes;    in sm4_ecb_do_crypt() local
      122  while ((nbytes = walk.nbytes) > 0) {    in sm4_ecb_do_crypt()
      129  nblks = BYTES2BLKS(nbytes);    in sm4_ecb_do_crypt()
      132  nbytes -= nblks * SM4_BLOCK_SIZE;    in sm4_ecb_do_crypt()
      137  err = skcipher_walk_done(&walk, nbytes);    in sm4_ecb_do_crypt()
      [all …]
|
D | aes-neonbs-glue.c |
      105  while (walk.nbytes >= AES_BLOCK_SIZE) {    in __ecb_crypt()
      106  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;    in __ecb_crypt()
      108  if (walk.nbytes < walk.total)    in __ecb_crypt()
      117  walk.nbytes - blocks * AES_BLOCK_SIZE);    in __ecb_crypt()
      165  while (walk.nbytes >= AES_BLOCK_SIZE) {    in cbc_encrypt()
      166  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;    in cbc_encrypt()
      174  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);    in cbc_encrypt()
      188  while (walk.nbytes >= AES_BLOCK_SIZE) {    in cbc_decrypt()
      189  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;    in cbc_decrypt()
      191  if (walk.nbytes < walk.total)    in cbc_decrypt()
      [all …]
|
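In the aes-neonbs hits, the comparison walk.nbytes < walk.total is what distinguishes an intermediate chunk from the final one: a partial block may only be consumed on the last chunk, so intermediate chunks are rounded down to a whole number of stride-sized groups before the NEON routine runs. A small sketch of just that decision, under the assumption that the stride is a multiple of the block size:

    #include <crypto/aes.h>
    #include <crypto/internal/skcipher.h>
    #include <linux/kernel.h>

    /*
     * Sketch: how many whole AES blocks may be processed from the current
     * walk chunk.  A trailing partial group is only allowed once this chunk
     * is the last one (walk->nbytes == walk->total).
     */
    static unsigned int usable_blocks(const struct skcipher_walk *walk)
    {
            unsigned int blocks = walk->nbytes / AES_BLOCK_SIZE;

            if (walk->nbytes < walk->total)
                    blocks = round_down(blocks, walk->stride / AES_BLOCK_SIZE);

            return blocks;
    }

The leftover, walk.nbytes - blocks * AES_BLOCK_SIZE, is then reported to skcipher_walk_done() exactly as the hits at lines 117 and 174 show.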
/linux-6.6.21/drivers/media/test-drivers/vidtv/ |
D | vidtv_ts.c |
      53   u32 nbytes = 0;    in vidtv_ts_null_write_into() local
      62   nbytes += vidtv_memcpy(args.dest_buf,    in vidtv_ts_null_write_into()
      63   args.dest_offset + nbytes,    in vidtv_ts_null_write_into()
      71   nbytes += vidtv_memset(args.dest_buf,    in vidtv_ts_null_write_into()
      72   args.dest_offset + nbytes,    in vidtv_ts_null_write_into()
      75   TS_PACKET_LEN - nbytes);    in vidtv_ts_null_write_into()
      78   if (nbytes != TS_PACKET_LEN)    in vidtv_ts_null_write_into()
      81   nbytes);    in vidtv_ts_null_write_into()
      83   return nbytes;    in vidtv_ts_null_write_into()
      88   u32 nbytes = 0;    in vidtv_ts_pcr_write_into() local
      [all …]
|
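The vidtv hits follow one convention: every write helper returns how many bytes it emitted, the caller adds that to a running nbytes, and nbytes doubles as the offset for the next write until a full 188-byte TS packet has been produced. A schematic version of that accumulation; write_chunk() is a stand-in for the driver's vidtv_memcpy()/vidtv_memset() helpers, not their real signatures:

    #include <linux/types.h>
    #include <linux/string.h>

    #define TS_PACKET_LEN 188

    /* stand-in helper: copy 'len' bytes at 'offset' and report how many were written */
    static u32 write_chunk(u8 *dest, u32 offset, const void *src, u32 len)
    {
            memcpy(dest + offset, src, len);
            return len;
    }

    /* assumes header_len <= TS_PACKET_LEN */
    static u32 packet_write_into(u8 *dest_buf, u32 dest_offset,
                                 const u8 *header, u32 header_len)
    {
            u32 nbytes = 0; /* bytes written by this function */
            u32 pad;

            /* header first; nbytes advances the destination offset */
            nbytes += write_chunk(dest_buf, dest_offset + nbytes,
                                  header, header_len);

            /* pad the remainder of the packet with stuffing bytes */
            pad = TS_PACKET_LEN - nbytes;
            memset(dest_buf + dest_offset + nbytes, 0xff, pad);
            nbytes += pad;

            return nbytes;  /* callers check this against TS_PACKET_LEN */
    }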
D | vidtv_pes.c |
      82   u32 nbytes = 0; /* the number of bytes written by this function */    in vidtv_pes_write_pts_dts() local
      122  nbytes += vidtv_memcpy(args->dest_buf,    in vidtv_pes_write_pts_dts()
      123  args->dest_offset + nbytes,    in vidtv_pes_write_pts_dts()
      128  return nbytes;    in vidtv_pes_write_pts_dts()
      133  u32 nbytes = 0; /* the number of bytes written by this function */    in vidtv_pes_write_h() local
      158  nbytes += vidtv_memcpy(args->dest_buf,    in vidtv_pes_write_h()
      159  args->dest_offset + nbytes,    in vidtv_pes_write_h()
      165  nbytes += vidtv_memcpy(args->dest_buf,    in vidtv_pes_write_h()
      166  args->dest_offset + nbytes,    in vidtv_pes_write_h()
      173  pts_dts_args.dest_offset = args->dest_offset + nbytes;    in vidtv_pes_write_h()
      [all …]
|
D | vidtv_psi.c |
      172  u32 nbytes = 0;    in vidtv_psi_ts_psi_write_into() local
      184  nbytes += vidtv_memset(args->dest_buf,    in vidtv_psi_ts_psi_write_into()
      185  args->dest_offset + nbytes,    in vidtv_psi_ts_psi_write_into()
      192  nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN;    in vidtv_psi_ts_psi_write_into()
      199  nbytes += vidtv_memcpy(args->dest_buf,    in vidtv_psi_ts_psi_write_into()
      200  args->dest_offset + nbytes,    in vidtv_psi_ts_psi_write_into()
      213  nbytes += vidtv_memset(args->dest_buf,    in vidtv_psi_ts_psi_write_into()
      214  args->dest_offset + nbytes,    in vidtv_psi_ts_psi_write_into()
      220  nbytes_past_boundary = (args->dest_offset + nbytes) % TS_PACKET_LEN;    in vidtv_psi_ts_psi_write_into()
      223  nbytes += vidtv_memcpy(args->dest_buf,    in vidtv_psi_ts_psi_write_into()
      [all …]
|
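vidtv_psi_ts_psi_write_into() adds one wrinkle to the same scheme: PSI payload can spill across 188-byte packet boundaries, so after each copy the code checks (dest_offset + nbytes) % TS_PACKET_LEN and, whenever the write lands on a boundary, a fresh TS header has to be emitted before more payload. A trimmed illustration of just that boundary test:

    #define TS_PACKET_LEN 188

    /*
     * After writing 'nbytes' bytes starting at 'dest_offset', how far into
     * the current TS packet are we?  A result of 0 means the next byte
     * begins a new packet and a TS header must be written first.
     */
    static inline unsigned int ts_bytes_past_boundary(unsigned int dest_offset,
                                                      unsigned int nbytes)
    {
            return (dest_offset + nbytes) % TS_PACKET_LEN;
    }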
/linux-6.6.21/crypto/ |
D | pcbc.c |
      25   unsigned int nbytes = walk->nbytes;    in crypto_pcbc_encrypt_segment() local
      37   } while ((nbytes -= bsize) >= bsize);    in crypto_pcbc_encrypt_segment()
      39   return nbytes;    in crypto_pcbc_encrypt_segment()
      47   unsigned int nbytes = walk->nbytes;    in crypto_pcbc_encrypt_inplace() local
      59   } while ((nbytes -= bsize) >= bsize);    in crypto_pcbc_encrypt_inplace()
      61   return nbytes;    in crypto_pcbc_encrypt_inplace()
      69   unsigned int nbytes;    in crypto_pcbc_encrypt() local
      74   while ((nbytes = walk.nbytes)) {    in crypto_pcbc_encrypt()
      76   nbytes = crypto_pcbc_encrypt_inplace(req, &walk,    in crypto_pcbc_encrypt()
      79   nbytes = crypto_pcbc_encrypt_segment(req, &walk,    in crypto_pcbc_encrypt()
      [all …]
|
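The generic templates in crypto/ (pcbc.c here, and cfb.c, xctr.c, cbc.c below) split each mode into an *_encrypt_segment() helper for distinct source and destination buffers and an *_encrypt_inplace() helper for src == dst, both returning the leftover nbytes; the top-level function just walks and dispatches. A sketch of that dispatch, assuming hypothetical per-mode helpers:

    #include <crypto/algapi.h>
    #include <crypto/internal/skcipher.h>

    /* hypothetical per-mode helpers; both return the unprocessed byte count */
    unsigned int mode_encrypt_segment(struct skcipher_walk *walk,
                                      struct crypto_cipher *cipher);
    unsigned int mode_encrypt_inplace(struct skcipher_walk *walk,
                                      struct crypto_cipher *cipher);

    static int mode_encrypt(struct skcipher_request *req,
                            struct crypto_cipher *cipher)
    {
            struct skcipher_walk walk;
            unsigned int nbytes;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while ((nbytes = walk.nbytes)) {
                    /* in-place requests reuse the same mapping for src and dst */
                    if (walk.src.virt.addr == walk.dst.virt.addr)
                            nbytes = mode_encrypt_inplace(&walk, cipher);
                    else
                            nbytes = mode_encrypt_segment(&walk, cipher);

                    /* leftover bytes are pushed back into the walk */
                    err = skcipher_walk_done(&walk, nbytes);
            }

            return err;
    }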
D | cfb.c |
      52   unsigned int nbytes = walk->nbytes;    in crypto_cfb_final() local
      55   crypto_xor_cpy(dst, stream, src, nbytes);    in crypto_cfb_final()
      62   unsigned int nbytes = walk->nbytes;    in crypto_cfb_encrypt_segment() local
      74   } while ((nbytes -= bsize) >= bsize);    in crypto_cfb_encrypt_segment()
      78   return nbytes;    in crypto_cfb_encrypt_segment()
      85   unsigned int nbytes = walk->nbytes;    in crypto_cfb_encrypt_inplace() local
      96   } while ((nbytes -= bsize) >= bsize);    in crypto_cfb_encrypt_inplace()
      100  return nbytes;    in crypto_cfb_encrypt_inplace()
      112  while (walk.nbytes >= bsize) {    in crypto_cfb_encrypt()
      120  if (walk.nbytes) {    in crypto_cfb_encrypt()
      [all …]
|
D | xctr.c |
      40   unsigned int nbytes = walk->nbytes;    in crypto_xctr_crypt_final() local
      45   crypto_xor_cpy(dst, keystream, src, nbytes);    in crypto_xctr_crypt_final()
      56   unsigned int nbytes = walk->nbytes;    in crypto_xctr_crypt_segment() local
      69   } while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);    in crypto_xctr_crypt_segment()
      71   return nbytes;    in crypto_xctr_crypt_segment()
      80   unsigned int nbytes = walk->nbytes;    in crypto_xctr_crypt_inplace() local
      95   } while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);    in crypto_xctr_crypt_inplace()
      97   return nbytes;    in crypto_xctr_crypt_inplace()
      105  unsigned int nbytes;    in crypto_xctr_crypt() local
      111  while (walk.nbytes >= XCTR_BLOCKSIZE) {    in crypto_xctr_crypt()
      [all …]
|
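crypto_xctr_crypt_final() (and crypto_cfb_final() above) handles the tail that is smaller than one block: the block cipher produces one last keystream block, and crypto_xor_cpy() XORs only the remaining nbytes of it onto the output. A reduced sketch, with the keystream generation abstracted into a placeholder:

    #include <crypto/algapi.h>              /* crypto_xor_cpy() */
    #include <crypto/internal/skcipher.h>

    #define BLOCKSIZE 16

    /* placeholder: encrypt the current counter block into 'keystream' */
    void make_keystream_block(u8 keystream[BLOCKSIZE]);

    static void crypt_final_sketch(struct skcipher_walk *walk)
    {
            u8 keystream[BLOCKSIZE];
            const u8 *src = walk->src.virt.addr;
            u8 *dst = walk->dst.virt.addr;
            unsigned int nbytes = walk->nbytes;     /* < BLOCKSIZE here */

            make_keystream_block(keystream);
            /* XOR only the bytes that actually exist in the tail */
            crypto_xor_cpy(dst, keystream, src, nbytes);
    }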
D | cbc.c |
      22   unsigned int nbytes = walk->nbytes;    in crypto_cbc_encrypt_segment() local
      40   } while ((nbytes -= bsize) >= bsize);    in crypto_cbc_encrypt_segment()
      42   return nbytes;    in crypto_cbc_encrypt_segment()
      50   unsigned int nbytes = walk->nbytes;    in crypto_cbc_encrypt_inplace() local
      66   } while ((nbytes -= bsize) >= bsize);    in crypto_cbc_encrypt_inplace()
      70   return nbytes;    in crypto_cbc_encrypt_inplace()
      81   while (walk.nbytes) {    in crypto_cbc_encrypt()
      97   unsigned int nbytes = walk->nbytes;    in crypto_cbc_decrypt_segment() local
      115  } while ((nbytes -= bsize) >= bsize);    in crypto_cbc_decrypt_segment()
      119  return nbytes;    in crypto_cbc_decrypt_segment()
      [all …]
|
D | scatterwalk.c |
      18   static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out)    in memcpy_dir() argument
      23   memcpy(dst, src, nbytes);    in memcpy_dir()
      27   size_t nbytes, int out)    in scatterwalk_copychunks() argument
      33   if (len_this_page > nbytes)    in scatterwalk_copychunks()
      34   len_this_page = nbytes;    in scatterwalk_copychunks()
      44   if (nbytes == len_this_page)    in scatterwalk_copychunks()
      48   nbytes -= len_this_page;    in scatterwalk_copychunks()
      56   unsigned int start, unsigned int nbytes, int out)    in scatterwalk_map_and_copy() argument
      61   if (!nbytes)    in scatterwalk_map_and_copy()
      67   scatterwalk_copychunks(buf, &walk, nbytes, out);    in scatterwalk_map_and_copy()
|
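scatterwalk_map_and_copy() is the helper most callers reach for: it walks the scatterlist page by page and copies nbytes between it and a linear buffer, starting at a byte offset. Direction follows the out flag: out == 0 reads from the scatterlist into the buffer, out == 1 writes the buffer into the scatterlist. A small usage example:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /* Copy an authentication tag out of, or back into, a request scatterlist. */
    static void copy_tag(struct scatterlist *sg, unsigned int offset,
                         u8 *tag, unsigned int taglen, bool store)
    {
            /*
             * out == 0: scatterlist -> buffer, out == 1: buffer -> scatterlist.
             * The call is a no-op when taglen is 0, as the hit at line 61 shows.
             */
            scatterwalk_map_and_copy(tag, sg, offset, taglen, store ? 1 : 0);
    }

This is the usual way AEAD implementations move tags and IVs between request scatterlists and stack buffers without setting up a full walk.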
/linux-6.6.21/arch/powerpc/crypto/ |
D | aes-spe-glue.c |
      185  unsigned int nbytes;    in ppc_ecb_crypt() local
      190  while ((nbytes = walk.nbytes) != 0) {    in ppc_ecb_crypt()
      191  nbytes = min_t(unsigned int, nbytes, MAX_BYTES);    in ppc_ecb_crypt()
      192  nbytes = round_down(nbytes, AES_BLOCK_SIZE);    in ppc_ecb_crypt()
      197  ctx->key_enc, ctx->rounds, nbytes);    in ppc_ecb_crypt()
      200  ctx->key_dec, ctx->rounds, nbytes);    in ppc_ecb_crypt()
      203  err = skcipher_walk_done(&walk, walk.nbytes - nbytes);    in ppc_ecb_crypt()
      224  unsigned int nbytes;    in ppc_cbc_crypt() local
      229  while ((nbytes = walk.nbytes) != 0) {    in ppc_cbc_crypt()
      230  nbytes = min_t(unsigned int, nbytes, MAX_BYTES);    in ppc_cbc_crypt()
      [all …]
|
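The SPE glue caps how much it processes per walk iteration so the time spent with the SPE unit enabled stays bounded: nbytes is clamped with min_t(), rounded down to whole AES blocks, and the difference walk.nbytes - nbytes is handed back to skcipher_walk_done() so the walk resumes exactly where processing stopped. A sketch of the clamping step only; the MAX_BYTES value here is illustrative, the real one lives in the driver:

    #include <crypto/aes.h>
    #include <linux/kernel.h>
    #include <linux/minmax.h>

    #define MAX_BYTES 768   /* illustrative per-iteration cap */

    /* How many bytes of the current walk chunk to process right now. */
    static unsigned int clamp_to_blocks(unsigned int walk_nbytes)
    {
            unsigned int nbytes = min_t(unsigned int, walk_nbytes, MAX_BYTES);

            return round_down(nbytes, AES_BLOCK_SIZE);
    }

The caller then finishes the iteration with err = skcipher_walk_done(&walk, walk.nbytes - nbytes), as the hit at line 203 shows.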
/linux-6.6.21/drivers/comedi/ |
D | comedi_buf.c |
      340  unsigned int nbytes)    in comedi_buf_write_alloc() argument
      345  if (nbytes > unalloc)    in comedi_buf_write_alloc()
      346  nbytes = unalloc;    in comedi_buf_write_alloc()
      348  async->buf_write_alloc_count += nbytes;    in comedi_buf_write_alloc()
      356  return nbytes;    in comedi_buf_write_alloc()
      430  unsigned int nbytes)    in comedi_buf_write_free() argument
      435  if (nbytes > allocated)    in comedi_buf_write_free()
      436  nbytes = allocated;    in comedi_buf_write_free()
      438  async->buf_write_count += nbytes;    in comedi_buf_write_free()
      439  async->buf_write_ptr += nbytes;    in comedi_buf_write_free()
      [all …]
|
/linux-6.6.21/lib/crypto/mpi/ |
D | mpicoder.c |
      36   MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes)    in mpi_read_raw_data() argument
      44   while (nbytes > 0 && buffer[0] == 0) {    in mpi_read_raw_data()
      46   nbytes--;    in mpi_read_raw_data()
      49   nbits = nbytes * 8;    in mpi_read_raw_data()
      54   if (nbytes > 0)    in mpi_read_raw_data()
      57   nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB);    in mpi_read_raw_data()
      65   if (nbytes > 0) {    in mpi_read_raw_data()
      66   i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB;    in mpi_read_raw_data()
      85   unsigned int nbits, nbytes;    in mpi_read_from_buffer() local
      97   nbytes = DIV_ROUND_UP(nbits, 8);    in mpi_read_from_buffer()
      [all …]
|
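mpi_read_raw_data() turns a big-endian byte string into an MPI: leading zero bytes are skipped, the bit count starts from nbytes * 8 (and is then trimmed by the leading zero bits of the first significant byte), and the limb count is DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB). The limb arithmetic is worth seeing on its own; this is a standalone sketch of that layout computation, not the library's code:

    #include <linux/kernel.h>       /* DIV_ROUND_UP() */
    #include <linux/types.h>

    #define BYTES_PER_MPI_LIMB      sizeof(unsigned long)

    struct raw_mpi_layout {
            unsigned int nbits;     /* significant bits (before zero-bit trimming) */
            unsigned int nlimbs;    /* limbs needed to hold them */
            unsigned int lead_pad;  /* unused bytes in the most significant limb */
    };

    static struct raw_mpi_layout mpi_layout(const u8 *buf, size_t nbytes)
    {
            struct raw_mpi_layout l = {};

            /* skip leading zero bytes; they carry no information */
            while (nbytes > 0 && buf[0] == 0) {
                    buf++;
                    nbytes--;
            }

            l.nbits = nbytes * 8;
            /* (the real code additionally subtracts the leading zero bits of buf[0]) */

            l.nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB);
            if (nbytes > 0)
                    l.lead_pad = (BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB)
                                 % BYTES_PER_MPI_LIMB;

            return l;
    }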
/linux-6.6.21/arch/s390/crypto/ |
D | chacha-glue.c |
      22   unsigned int nbytes, const u32 *key,    in chacha20_crypt_s390() argument
      28   chacha20_vx(dst, src, nbytes, key, counter);    in chacha20_crypt_s390()
      31   *counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;    in chacha20_crypt_s390()
      40   unsigned int nbytes;    in chacha20_s390() local
      46   while (walk.nbytes > 0) {    in chacha20_s390()
      47   nbytes = walk.nbytes;    in chacha20_s390()
      48   if (nbytes < walk.total)    in chacha20_s390()
      49   nbytes = round_down(nbytes, walk.stride);    in chacha20_s390()
      51   if (nbytes <= CHACHA_BLOCK_SIZE) {    in chacha20_s390()
      53   walk.src.virt.addr, nbytes,    in chacha20_s390()
      [all …]
|
/linux-6.6.21/drivers/spi/ |
D | spi-mtk-nor.c |
      172  if (op->addr.nbytes == 4) {    in mtk_nor_set_addr()
      189  if (op->dummy.nbytes)    in mtk_nor_match_read()
      190  dummy = op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth;    in mtk_nor_match_read()
      217  tx_len = op->cmd.nbytes + op->addr.nbytes;    in mtk_nor_match_prg()
      221  tx_len += op->dummy.nbytes;    in mtk_nor_match_prg()
      229  if ((!op->addr.nbytes) &&    in mtk_nor_match_prg()
      230  (tx_len + op->data.nbytes > MTK_NOR_REG_PRGDATA_MAX + 1))    in mtk_nor_match_prg()
      236  rx_len = op->data.nbytes;    in mtk_nor_match_prg()
      237  prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes;    in mtk_nor_match_prg()
      241  if (!op->addr.nbytes)    in mtk_nor_match_prg()
      [all …]
|
D | spi-wpcm-fiu.c |
      78   static void wpcm_fiu_set_data(struct wpcm_fiu_spi *fiu, const u8 *data, unsigned int nbytes)    in wpcm_fiu_set_data() argument
      82   for (i = 0; i < nbytes; i++)    in wpcm_fiu_set_data()
      86   static void wpcm_fiu_get_data(struct wpcm_fiu_spi *fiu, u8 *data, unsigned int nbytes)    in wpcm_fiu_get_data() argument
      90   for (i = 0; i < nbytes; i++)    in wpcm_fiu_get_data()
      146  return (op->addr.nbytes == 0 || op->addr.nbytes == 3) &&    in wpcm_fiu_normal_match()
      147  op->dummy.nbytes == 0 && op->data.nbytes <= 4;    in wpcm_fiu_normal_match()
      158  wpcm_fiu_set_data(fiu, op->data.buf.out, op->data.nbytes);    in wpcm_fiu_normal_exec()
      160  ret = wpcm_fiu_do_uma(fiu, spi_get_chipselect(mem->spi, 0), op->addr.nbytes == 3,    in wpcm_fiu_normal_exec()
      161  op->data.dir == SPI_MEM_DATA_OUT, op->data.nbytes);    in wpcm_fiu_normal_exec()
      164  wpcm_fiu_get_data(fiu, op->data.buf.in, op->data.nbytes);    in wpcm_fiu_normal_exec()
      [all …]
|
D | spi-mem.c |
      42   if (!op->data.nbytes)    in spi_controller_dma_map_mem_op_data()
      55   return spi_map_buf(ctlr, dmadev, sgt, op->data.buf.in, op->data.nbytes,    in spi_controller_dma_map_mem_op_data()
      88   if (!op->data.nbytes)    in spi_controller_dma_unmap_mem_op_data()
      148  if (op->addr.nbytes &&    in spi_mem_check_buswidth()
      152  if (op->dummy.nbytes &&    in spi_mem_check_buswidth()
      175  if (op->cmd.nbytes != 2)    in spi_mem_default_supports_op()
      178  if (op->cmd.nbytes != 1)    in spi_mem_default_supports_op()
      201  if (!op->cmd.buswidth || !op->cmd.nbytes)    in spi_mem_check_op()
      204  if ((op->addr.nbytes && !op->addr.buswidth) ||    in spi_mem_check_op()
      205  (op->dummy.nbytes && !op->dummy.buswidth) ||    in spi_mem_check_op()
      [all …]
|
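In the SPI drivers, nbytes is not a walk counter but a field count inside struct spi_mem_op: cmd.nbytes, addr.nbytes, dummy.nbytes and data.nbytes describe the four phases of a flash transaction, and each controller's supports_op/exec_op path (spi-mtk-nor and spi-wpcm-fiu above, the generic checks in spi-mem.c here) validates them against hardware limits. A typical op as a caller would build it with the spi-mem helpers:

    #include <linux/errno.h>
    #include <linux/spi/spi-mem.h>

    /* Plain 1-1-1 read: 1-byte opcode, 3-byte address, no dummy cycles. */
    static int flash_read(struct spi_mem *mem, u32 addr, void *buf, size_t len)
    {
            struct spi_mem_op op =
                    SPI_MEM_OP(SPI_MEM_OP_CMD(0x03, 1),
                               SPI_MEM_OP_ADDR(3, addr, 1),      /* addr.nbytes == 3 */
                               SPI_MEM_OP_NO_DUMMY,              /* dummy.nbytes == 0 */
                               SPI_MEM_OP_DATA_IN(len, buf, 1)); /* data.nbytes == len */

            if (!spi_mem_supports_op(mem, &op))
                    return -EOPNOTSUPP;

            return spi_mem_exec_op(mem, &op);
    }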
/linux-6.6.21/drivers/infiniband/hw/hfi1/ |
D | pio_copy.c |
      184  unsigned int nbytes)    in read_low_bytes() argument
      187  jcopy(&pbuf->carry.val8[0], from, nbytes);    in read_low_bytes()
      188  pbuf->carry_bytes = nbytes;    in read_low_bytes()
      200  const void *from, unsigned int nbytes)    in read_extra_bytes() argument
      202  jcopy(&pbuf->carry.val8[pbuf->carry_bytes], from, nbytes);    in read_extra_bytes()
      203  pbuf->carry_bytes += nbytes;    in read_extra_bytes()
      263  const void *from, size_t nbytes)    in seg_pio_copy_start() argument
      273  dend = dest + ((nbytes >> 3) * sizeof(u64));    in seg_pio_copy_start()
      338  read_low_bytes(pbuf, from, nbytes & 0x7);    in seg_pio_copy_start()
      340  pbuf->qw_written = 1 /*PBC*/ + (nbytes >> 3);    in seg_pio_copy_start()
      [all …]
|
/linux-6.6.21/lib/crypto/ |
D | poly1305.c |
      31   unsigned int nbytes)    in poly1305_update_generic() argument
      36   bytes = min(nbytes, POLY1305_BLOCK_SIZE - desc->buflen);    in poly1305_update_generic()
      39   nbytes -= bytes;    in poly1305_update_generic()
      49   if (likely(nbytes >= POLY1305_BLOCK_SIZE)) {    in poly1305_update_generic()
      51   nbytes / POLY1305_BLOCK_SIZE, 1);    in poly1305_update_generic()
      52   src += nbytes - (nbytes % POLY1305_BLOCK_SIZE);    in poly1305_update_generic()
      53   nbytes %= POLY1305_BLOCK_SIZE;    in poly1305_update_generic()
      56   if (unlikely(nbytes)) {    in poly1305_update_generic()
      57   desc->buflen = nbytes;    in poly1305_update_generic()
      58   memcpy(desc->buf, src, nbytes);    in poly1305_update_generic()
|
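poly1305_update_generic() buffers input until a full 16-byte block is available, processes whole blocks in bulk, and stashes the trailing nbytes % POLY1305_BLOCK_SIZE bytes for the next call; that carry buffer is what lets callers feed data in arbitrary-sized pieces. Typical use of the lib/crypto interface:

    #include <crypto/poly1305.h>

    static void mac_message(const u8 key[POLY1305_KEY_SIZE],
                            const u8 *msg, unsigned int msglen,
                            u8 digest[POLY1305_DIGEST_SIZE])
    {
            struct poly1305_desc_ctx desc;

            poly1305_init(&desc, key);
            /* chunk sizes are arbitrary; partial blocks are carried over */
            poly1305_update(&desc, msg, msglen / 2);
            poly1305_update(&desc, msg + msglen / 2, msglen - msglen / 2);
            poly1305_final(&desc, digest);
    }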
/linux-6.6.21/include/crypto/ |
D | ctr.h |
      36   while (walk.nbytes > 0) {    in crypto_ctr_encrypt_walk()
      39   int nbytes = walk.nbytes;    in crypto_ctr_encrypt_walk() local
      42   if (nbytes < walk.total) {    in crypto_ctr_encrypt_walk()
      43   tail = walk.nbytes & (blocksize - 1);    in crypto_ctr_encrypt_walk()
      44   nbytes -= tail;    in crypto_ctr_encrypt_walk()
      48   int bsize = min(nbytes, blocksize);    in crypto_ctr_encrypt_walk()
      57   nbytes -= bsize;    in crypto_ctr_encrypt_walk()
      58   } while (nbytes > 0);    in crypto_ctr_encrypt_walk()
|
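crypto_ctr_encrypt_walk() shows the same tail discipline in header form: while more data follows (nbytes < walk.total) the loop only consumes whole blocks and carries the tail forward; on the final chunk, min(nbytes, blocksize) lets the last partial block be XORed with just as much keystream as it needs. A freestanding sketch of that tail handling, not the header's exact code:

    #include <crypto/algapi.h>      /* crypto_xor() */
    #include <linux/minmax.h>

    /* placeholder: encrypt the counter into 'keystream' and bump the counter */
    void next_keystream_block(u8 *keystream, unsigned int blocksize);

    static void ctr_xor_chunk(u8 *data, unsigned int nbytes, unsigned int total,
                              unsigned int blocksize, u8 *keystream)
    {
            unsigned int tail = 0;

            /* only the very last chunk may end in a partial block */
            if (nbytes < total) {
                    tail = nbytes & (blocksize - 1);
                    nbytes -= tail;
            }

            while (nbytes > 0) {
                    unsigned int bsize = min(nbytes, blocksize);

                    next_keystream_block(keystream, blocksize);
                    crypto_xor(data, keystream, bsize);     /* in-place CTR */
                    data += bsize;
                    nbytes -= bsize;
            }

            /* 'tail' bytes remain for the next walk iteration */
    }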