Lines Matching refs: AES_BLOCK_SIZE

119 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
132 __be32 j0[AES_BLOCK_SIZE / sizeof(u32)];
133 u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
134 __be32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
158 u8 lastc[AES_BLOCK_SIZE];
395 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
401 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
530 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_update_req_iv()
613 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
615 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
644 size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE); in atmel_aes_cpu_start()
885 case AES_BLOCK_SIZE: in atmel_aes_dma_start()
985 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
1021 ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
1030 datalen = AES_BLOCK_SIZE * (0x10000 - start); in atmel_aes_ctr_transfer()
1049 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
1073 memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_start()
1140 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_crypt()
1303 .base.cra_blocksize = AES_BLOCK_SIZE,
1316 .base.cra_blocksize = AES_BLOCK_SIZE,
1325 .ivsize = AES_BLOCK_SIZE,
1339 .ivsize = AES_BLOCK_SIZE,
1344 .base.cra_blocksize = AES_BLOCK_SIZE,
1353 .ivsize = AES_BLOCK_SIZE,
1367 .ivsize = AES_BLOCK_SIZE,
1381 .ivsize = AES_BLOCK_SIZE,
1395 .ivsize = AES_BLOCK_SIZE,
1409 .ivsize = AES_BLOCK_SIZE,
1425 .ivsize = AES_BLOCK_SIZE,
1493 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1534 padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE); in atmel_aes_gcm_start()
1535 datalen = ivsize + padlen + AES_BLOCK_SIZE; in atmel_aes_gcm_start()
1594 padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE); in atmel_aes_gcm_length()
1617 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1675 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1733 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_gcm_crypt()
1797 .maxauthsize = AES_BLOCK_SIZE,
1848 u32 tweak[AES_BLOCK_SIZE / sizeof(u32)]; in atmel_aes_xts_process_data()
1849 static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), }; in atmel_aes_xts_process_data()
1860 for (i = 0; i < AES_BLOCK_SIZE/2; ++i) in atmel_aes_xts_process_data()
1861 swap(tweak_bytes[i], tweak_bytes[AES_BLOCK_SIZE - 1 - i]); in atmel_aes_xts_process_data()
1943 .base.cra_blocksize = AES_BLOCK_SIZE,
1949 .ivsize = AES_BLOCK_SIZE,
2025 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)]; in atmel_aes_authenc_transfer()
2214 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_authenc_crypt()
2237 .ivsize = AES_BLOCK_SIZE,
2243 .cra_blocksize = AES_BLOCK_SIZE,
2253 .ivsize = AES_BLOCK_SIZE,
2259 .cra_blocksize = AES_BLOCK_SIZE,
2269 .ivsize = AES_BLOCK_SIZE,
2275 .cra_blocksize = AES_BLOCK_SIZE,
2285 .ivsize = AES_BLOCK_SIZE,
2291 .cra_blocksize = AES_BLOCK_SIZE,
2301 .ivsize = AES_BLOCK_SIZE,
2307 .cra_blocksize = AES_BLOCK_SIZE,
2320 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
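
For context on the two crypto_inc() calls in the CTR path above (atmel_aes_ctr_update_req_iv() and atmel_aes_ctr_transfer()): crypto_inc() treats the AES_BLOCK_SIZE-byte IV as a single big-endian counter and increments it by one. Below is a minimal user-space sketch of that increment, assuming only that AES_BLOCK_SIZE is 16 bytes; ctr_inc() is a hypothetical stand-in for the kernel helper, not code taken from the driver.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16   /* same value as the kernel constant */

/* Hypothetical stand-in for crypto_inc(): increment a big-endian
 * counter of 'size' bytes, carrying from the last byte upward. */
static void ctr_inc(uint8_t *ctr, unsigned int size)
{
	int i;

	for (i = (int)size - 1; i >= 0; i--) {
		if (++ctr[i] != 0)      /* stop once a byte does not wrap */
			break;
	}
}

int main(void)
{
	uint8_t iv[AES_BLOCK_SIZE];

	memset(iv, 0xff, sizeof(iv));   /* worst case: every byte carries */
	iv[0] = 0x00;

	ctr_inc(iv, AES_BLOCK_SIZE);    /* mirrors crypto_inc(iv, AES_BLOCK_SIZE) */
	printf("%02x ... %02x\n", iv[0], iv[AES_BLOCK_SIZE - 1]);   /* 01 ... 00 */
	return 0;
}

The same 16-byte block size is what the AES_BLOCK_SIZE / sizeof(u32) expressions above reduce to: the IV, tag, GHASH and tweak buffers are sized as four 32-bit words per AES block.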