Searched refs:GHASH_BLOCK_SIZE (Results 1 – 10 of 10) sorted by relevance
/linux-6.1.9/arch/s390/crypto/

ghash_s390.c
      16  #define GHASH_BLOCK_SIZE 16   (macro)
      20  u8 key[GHASH_BLOCK_SIZE];
      24  u8 icv[GHASH_BLOCK_SIZE];
      25  u8 key[GHASH_BLOCK_SIZE];
      26  u8 buffer[GHASH_BLOCK_SIZE];
      36  memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);   in ghash_init()
      46  if (keylen != GHASH_BLOCK_SIZE)   in ghash_setkey()
      49  memcpy(ctx->key, key, GHASH_BLOCK_SIZE);   in ghash_setkey()
      62  u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);   in ghash_update()
      73  GHASH_BLOCK_SIZE);   in ghash_update()
      [all …]
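The ghash_update() hits at lines 62 and 73 are part of the partial-block bookkeeping shared by all of the software GHASH implementations in this list: input that does not fill a complete 16-byte block is parked in the descriptor buffer, and dctx->bytes counts how many bytes are still missing from it. A minimal userspace sketch of that scheme, with hypothetical names (not the kernel code itself):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    struct ghash_desc {                     /* hypothetical, mirrors dctx */
        uint8_t buffer[GHASH_BLOCK_SIZE];   /* staging area for a partial block */
        uint32_t bytes;                     /* bytes still missing from buffer */
    };

    /* stand-in for the per-arch block transform (CPACF KIMD on s390,
     * PCLMUL on x86, ...); elided in this sketch */
    static void ghash_block(struct ghash_desc *d, const uint8_t *block)
    {
        (void)d; (void)block;
    }

    static void ghash_update(struct ghash_desc *d, const uint8_t *src, size_t len)
    {
        if (d->bytes) {                     /* finish a previously started block */
            uint8_t *pos = d->buffer + (GHASH_BLOCK_SIZE - d->bytes);
            size_t n = len < d->bytes ? len : d->bytes;

            memcpy(pos, src, n);
            d->bytes -= n;
            src += n;
            len -= n;
            if (!d->bytes)                  /* buffer finally full: consume it */
                ghash_block(d, d->buffer);
        }
        while (len >= GHASH_BLOCK_SIZE) {   /* full blocks straight from src */
            ghash_block(d, src);
            src += GHASH_BLOCK_SIZE;
            len -= GHASH_BLOCK_SIZE;
        }
        if (len) {                          /* stash the new partial block */
            memcpy(d->buffer, src, len);
            d->bytes = GHASH_BLOCK_SIZE - len;
        }
    }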
aes_s390.c
     850  u8 j0[GHASH_BLOCK_SIZE];   /* initial counter value */   in gcm_aes_crypt()
     945  .ivsize = GHASH_BLOCK_SIZE - sizeof(u32),
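Two details worth noting here: .ivsize = GHASH_BLOCK_SIZE - sizeof(u32) works out to 16 - 4 = 12 bytes, the standard 96-bit GCM nonce, and j0 is GCM's initial counter block J0, which for a 96-bit IV is the IV followed by a 32-bit big-endian counter set to 1 (NIST SP 800-38D). A small illustrative sketch (hypothetical helper, not from aes_s390.c):

    #include <stdint.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16
    #define GCM_IV_SIZE (GHASH_BLOCK_SIZE - sizeof(uint32_t))   /* 16 - 4 = 12 */

    /* J0 = IV || 0^31 || 1 for a 96-bit IV (SP 800-38D) */
    static void gcm_build_j0(uint8_t j0[GHASH_BLOCK_SIZE],
                             const uint8_t iv[GCM_IV_SIZE])
    {
        memcpy(j0, iv, GCM_IV_SIZE);
        memset(j0 + GCM_IV_SIZE, 0, 3);     /* bytes 12..14 */
        j0[GHASH_BLOCK_SIZE - 1] = 1;       /* 32-bit big-endian counter = 1 */
    }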
/linux-6.1.9/crypto/

ghash-generic.c
      61  if (keylen != GHASH_BLOCK_SIZE)   in ghash_setkey()
      67  BUILD_BUG_ON(sizeof(k) != GHASH_BLOCK_SIZE);   in ghash_setkey()
      68  memcpy(&k, key, GHASH_BLOCK_SIZE); /* avoid violating alignment rules */   in ghash_setkey()
      70  memzero_explicit(&k, GHASH_BLOCK_SIZE);   in ghash_setkey()
      87  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);   in ghash_update()
      99  while (srclen >= GHASH_BLOCK_SIZE) {   in ghash_update()
     100  crypto_xor(dst, src, GHASH_BLOCK_SIZE);   in ghash_update()
     102  src += GHASH_BLOCK_SIZE;   in ghash_update()
     103  srclen -= GHASH_BLOCK_SIZE;   in ghash_update()
     107  dctx->bytes = GHASH_BLOCK_SIZE - srclen;   in ghash_update()
      [all …]
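Lines 99-103 are the heart of software GHASH: XOR the next 16-byte block into the running state, then multiply the state by the hash subkey H in GF(2^128). ghash-generic.c does the multiply through the kernel's table-driven gf128mul library; the dependency-free sketch below substitutes the bitwise multiplication from NIST SP 800-38D, which is far slower but shows the math:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    /* Bitwise GF(2^128) multiply per NIST SP 800-38D, Algorithm 1.
     * Bit 0 is the most significant bit of byte 0; R = 0xE1 || 0^120. */
    static void gf128_mul(uint8_t z[16], const uint8_t x[16], const uint8_t y[16])
    {
        uint8_t v[16], r[16] = { 0 };
        int i, j;

        memcpy(v, y, 16);
        for (i = 0; i < 128; i++) {
            if (x[i / 8] & (0x80 >> (i % 8)))   /* bit i of x set? */
                for (j = 0; j < 16; j++)
                    r[j] ^= v[j];
            /* v >>= 1 as a 128-bit string, reducing by R on carry-out */
            int lsb = v[15] & 1;
            for (j = 15; j > 0; j--)
                v[j] = (v[j] >> 1) | (v[j - 1] << 7);
            v[0] >>= 1;
            if (lsb)
                v[0] ^= 0xe1;
        }
        memcpy(z, r, 16);
    }

    /* the loop from ghash_update(): state = (state ^ block) * H per block */
    static void ghash_blocks(uint8_t state[16], const uint8_t *src,
                             size_t srclen, const uint8_t h[16])
    {
        uint8_t tmp[16];
        int i;

        while (srclen >= GHASH_BLOCK_SIZE) {
            for (i = 0; i < GHASH_BLOCK_SIZE; i++)  /* crypto_xor() */
                state[i] ^= src[i];
            gf128_mul(tmp, state, h);
            memcpy(state, tmp, GHASH_BLOCK_SIZE);
            src += GHASH_BLOCK_SIZE;
            srclen -= GHASH_BLOCK_SIZE;
        }
    }

For speed, the kernel instead precomputes a per-key multiplication table: the setkey hits at lines 67-70 copy the key into a properly aligned be128, build the table from it, then wipe the stack copy with memzero_explicit().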
/linux-6.1.9/arch/arm64/crypto/

ghash-ce-glue.c
      29  #define GHASH_BLOCK_SIZE 16   (macro)
      40  u8 buf[GHASH_BLOCK_SIZE];
      84  src += GHASH_BLOCK_SIZE;   in ghash_do_update()
      87  crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);   in ghash_do_update()
     113  #define MAX_BLOCKS (SZ_64K / GHASH_BLOCK_SIZE)
     119  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;   in ghash_update()
     123  if ((partial + len) >= GHASH_BLOCK_SIZE) {   in ghash_update()
     128  int p = GHASH_BLOCK_SIZE - partial;   in ghash_update()
     135  blocks = len / GHASH_BLOCK_SIZE;   in ghash_update()
     136  len %= GHASH_BLOCK_SIZE;   in ghash_update()
      [all …]
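The arm64 glue (and the near-identical 32-bit arm version below) does its partial-block bookkeeping differently: instead of a bytes-remaining counter it keeps a running total, ctx->count, and recovers the partial fill as count % GHASH_BLOCK_SIZE (line 119). MAX_BLOCKS at line 113 caps a single accelerated call at SZ_64K / 16 = 4096 blocks, i.e. 64 KiB, presumably so that one invocation cannot hold the NEON unit for too long. A rough sketch of that chunking, with hypothetical names:

    #include <stddef.h>
    #include <stdint.h>

    #define GHASH_BLOCK_SIZE 16
    #define MAX_BLOCKS (65536 / GHASH_BLOCK_SIZE)   /* SZ_64K / 16 = 4096 */

    /* stand-in for the NEON block routine behind ghash_do_update() */
    static void ghash_do_blocks(uint8_t state[GHASH_BLOCK_SIZE],
                                const uint8_t *src, size_t nblocks)
    {
        (void)state; (void)src; (void)nblocks;      /* elided in this sketch */
    }

    static void ghash_walk(uint8_t state[GHASH_BLOCK_SIZE],
                           const uint8_t *src, size_t blocks)
    {
        while (blocks) {
            size_t chunk = blocks < MAX_BLOCKS ? blocks : MAX_BLOCKS;

            /* kernel_neon_begin() here in the real driver */
            ghash_do_blocks(state, src, chunk);
            /* kernel_neon_end() */

            src += chunk * GHASH_BLOCK_SIZE;
            blocks -= chunk;
        }
    }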
/linux-6.1.9/arch/arm/crypto/

ghash-ce-glue.c
      27  #define GHASH_BLOCK_SIZE 16   (macro)
      37  u8 buf[GHASH_BLOCK_SIZE];
      82  src += GHASH_BLOCK_SIZE;   in ghash_do_update()
      85  crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);   in ghash_do_update()
      98  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;   in ghash_update()
     102  if ((partial + len) >= GHASH_BLOCK_SIZE) {   in ghash_update()
     107  int p = GHASH_BLOCK_SIZE - partial;   in ghash_update()
     114  blocks = len / GHASH_BLOCK_SIZE;   in ghash_update()
     115  len %= GHASH_BLOCK_SIZE;   in ghash_update()
     119  src += blocks * GHASH_BLOCK_SIZE;   in ghash_update()
      [all …]
/linux-6.1.9/drivers/crypto/vmx/

ghash.c
      60  if (keylen != GHASH_BLOCK_SIZE)   in p8_ghash_setkey()
      71  memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);   in p8_ghash_setkey()
      89  crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);   in __ghash_block()
     108  while (srclen >= GHASH_BLOCK_SIZE) {   in __ghash_blocks()
     109  crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE);   in __ghash_blocks()
     111  srclen -= GHASH_BLOCK_SIZE;   in __ghash_blocks()
     112  src += GHASH_BLOCK_SIZE;   in __ghash_blocks()
     181  .cra_blocksize = GHASH_BLOCK_SIZE,
/linux-6.1.9/arch/x86/crypto/

ghash-clmulni-intel_glue.c
      23  #define GHASH_BLOCK_SIZE 16   (macro)
      40  u8 buffer[GHASH_BLOCK_SIZE];
      60  if (keylen != GHASH_BLOCK_SIZE)   in ghash_setkey()
      86  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);   in ghash_update()
     104  dctx->bytes = GHASH_BLOCK_SIZE - srclen;   in ghash_update()
     117  u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);   in ghash_flush()
     137  memcpy(dst, buf, GHASH_BLOCK_SIZE);   in ghash_final()
     154  .cra_blocksize = GHASH_BLOCK_SIZE,
     307  .cra_blocksize = GHASH_BLOCK_SIZE,
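ghash_flush() (line 117) and ghash_final() (line 137) show how the hash is finished: if a partial block is still outstanding, it is treated as zero-padded to a full 16 bytes and run through one more multiply, since GHASH is only defined over whole blocks, and the 16-byte state itself is the digest. A hypothetical finishing step in the same style as, and reusing gf128_mul() and the headers from, the ghash-generic sketch above, where any partial input has already been XORed into state:

    /* bytes_missing plays the role of dctx->bytes */
    static void ghash_flush_final(uint8_t digest[16], uint8_t state[16],
                                  unsigned int bytes_missing,
                                  const uint8_t h[16])
    {
        if (bytes_missing) {        /* zero padding is a no-op under XOR, so   */
            uint8_t tmp[16];        /* the padded block is already in `state` */
            gf128_mul(tmp, state, h);
            memcpy(state, tmp, 16);
        }
        memcpy(digest, state, 16);  /* the digest is the final state */
    }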
/linux-6.1.9/include/crypto/

ghash.h
      12  #define GHASH_BLOCK_SIZE 16   (macro)
      20  u8 buffer[GHASH_BLOCK_SIZE];
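include/crypto/ghash.h is where the canonical GHASH_BLOCK_SIZE definition and the shared descriptor buffer live; the per-arch drivers above all register their implementation under the algorithm name "ghash" behind the generic shash API, so in-kernel callers never touch these internals directly. A hedged sketch of such a caller (error handling abbreviated; the 16-byte key is the GCM hash subkey H):

    #include <crypto/ghash.h>
    #include <crypto/hash.h>
    #include <linux/err.h>
    #include <linux/types.h>

    static int ghash_digest_example(const u8 key[GHASH_BLOCK_SIZE],
                                    const u8 *data, unsigned int len,
                                    u8 out[GHASH_BLOCK_SIZE])
    {
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("ghash", 0, 0);   /* picks the best driver */
        if (IS_ERR(tfm))
            return PTR_ERR(tfm);

        err = crypto_shash_setkey(tfm, key, GHASH_BLOCK_SIZE);
        if (!err) {
            SHASH_DESC_ON_STACK(desc, tfm);

            desc->tfm = tfm;
            err = crypto_shash_digest(desc, data, len, out);
            shash_desc_zero(desc);
        }

        crypto_free_shash(tfm);
        return err;
    }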
/linux-6.1.9/drivers/crypto/chelsio/

chcr_crypto.h
      39  #define GHASH_BLOCK_SIZE 16   (macro)
/linux-6.1.9/drivers/crypto/inside-secure/

safexcel_cipher.c
    2635  ctx->state_sz = GHASH_BLOCK_SIZE;   in safexcel_aead_gcm_cra_init()