/linux-6.6.21/arch/sparc/crypto/

sha512_glue.c:
    71  unsigned int i, index, padlen;    in sha512_sparc64_final() local
    82  padlen = (index < 112) ? (112 - index) : ((SHA512_BLOCK_SIZE+112) - index);    in sha512_sparc64_final()
    85  if (padlen <= 112) {    in sha512_sparc64_final()
    86  if ((sctx->count[0] += padlen) < padlen)    in sha512_sparc64_final()
    88  memcpy(sctx->buf + index, padding, padlen);    in sha512_sparc64_final()
    90  __sha512_sparc64_update(sctx, padding, padlen, index);    in sha512_sparc64_final()

sha1_glue.c:
    71  unsigned int i, index, padlen;    in sha1_sparc64_final() local
    80  padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);    in sha1_sparc64_final()
    83  if (padlen <= 56) {    in sha1_sparc64_final()
    84  sctx->count += padlen;    in sha1_sparc64_final()
    85  memcpy(sctx->buffer + index, padding, padlen);    in sha1_sparc64_final()
    87  __sha1_sparc64_update(sctx, padding, padlen, index);    in sha1_sparc64_final()

md5_glue.c:
    87  unsigned int i, index, padlen;    in md5_sparc64_final() local
    96  padlen = (index < 56) ? (56 - index) : ((MD5_HMAC_BLOCK_SIZE+56) - index);    in md5_sparc64_final()
    99  if (padlen <= 56) {    in md5_sparc64_final()
    100  sctx->byte_count += padlen;    in md5_sparc64_final()
    101  memcpy((u8 *)sctx->block + index, padding, padlen);    in md5_sparc64_final()
    103  __md5_sparc64_update(sctx, padding, padlen, index);    in md5_sparc64_final()

sha256_glue.c:
    70  unsigned int i, index, padlen;    in sha256_sparc64_final() local
    79  padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index);    in sha256_sparc64_final()
    82  if (padlen <= 56) {    in sha256_sparc64_final()
    83  sctx->count += padlen;    in sha256_sparc64_final()
    84  memcpy(sctx->buf + index, padding, padlen);    in sha256_sparc64_final()
    86  __sha256_sparc64_update(sctx, padding, padlen, index);    in sha256_sparc64_final()

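All four sparc64 glue files above compute the same Merkle-Damgård padding length: enough bytes (0x80 followed by zeros) so that, once the message length is appended, the data ends exactly on a block boundary. A minimal sketch of that calculation, assuming the 64-byte block and 8-byte length field of MD5/SHA-1/SHA-256; SHA-512 uses 112 and 128 instead because its blocks are 128 bytes and its length field is 16 bytes:

```c
#include <stdint.h>

/*
 * Hypothetical helper mirroring the ternary above: how many padding bytes
 * (0x80 followed by zeros) must be appended to a message of total_bytes so
 * that an 8-byte length field then lands exactly at the end of a 64-byte
 * block. The result is always between 1 and 64.
 */
static unsigned int md_padlen(uint64_t total_bytes)
{
    unsigned int index = total_bytes & 0x3f;    /* bytes used in the last block */

    return (index < 56) ? (56 - index) : ((64 + 56) - index);
}
```

The identical formula reappears in padlock-sha.c and powerpc's sha1.c further down; only the constants change per digest.
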
/linux-6.6.21/arch/powerpc/crypto/

sha1-spe-glue.c:
    108  int padlen;    in ppc_spe_sha1_final() local
    112  padlen = 55 - offset;    in ppc_spe_sha1_final()
    117  if (padlen < 0) {    in ppc_spe_sha1_final()
    118  memset(p, 0x00, padlen + sizeof (u64));    in ppc_spe_sha1_final()
    121  padlen = 56;    in ppc_spe_sha1_final()
    124  memset(p, 0, padlen);    in ppc_spe_sha1_final()

sha256-spe-glue.c:
    110  int padlen;    in ppc_spe_sha256_final() local
    114  padlen = 55 - offset;    in ppc_spe_sha256_final()
    119  if (padlen < 0) {    in ppc_spe_sha256_final()
    120  memset(p, 0x00, padlen + sizeof (u64));    in ppc_spe_sha256_final()
    123  padlen = 56;    in ppc_spe_sha256_final()
    126  memset(p, 0, padlen);    in ppc_spe_sha256_final()

md5-glue.c:
    81  int padlen = 55 - offset;    in ppc_md5_final() local
    87  if (padlen < 0) {    in ppc_md5_final()
    88  memset(p, 0x00, padlen + sizeof (u64));    in ppc_md5_final()
    91  padlen = 56;    in ppc_md5_final()
    94  memset(p, 0, padlen);    in ppc_md5_final()

sha1.c:
    65  u32 i, index, padlen;    in powerpc_sha1_final() local
    73  padlen = (index < 56) ? (56 - index) : ((64+56) - index);    in powerpc_sha1_final()
    74  powerpc_sha1_update(desc, padding, padlen);    in powerpc_sha1_final()

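The PowerPC SPE glue takes an in-place route instead: it drops the 0x80 byte into the context buffer, treats padlen = 55 - offset as the number of zero bytes left before the 8-byte length slot, and when that goes negative it zero-fills the rest of the block, compresses it, and restarts with a full 56-byte pad. A sketch under those assumptions, with a hypothetical transform() standing in for the block compression function:

```c
#include <stdint.h>
#include <string.h>

/*
 * Hypothetical in-place finalization for a 64-byte-block digest, modeled on
 * the SPE glue above: buf holds the partial block, count is the total number
 * of message bytes hashed so far, transform() compresses one 64-byte block.
 */
static void finalize_in_place(uint8_t buf[64], uint64_t count,
                              void (*transform)(const uint8_t *block))
{
    unsigned int offset = count & 0x3f;
    uint8_t *p = buf + offset;
    int padlen = 55 - (int)offset;      /* zero bytes before the 8-byte length slot */
    uint64_t bits = count << 3;         /* message length in bits */
    int i;

    *p++ = 0x80;                        /* mandatory first padding byte */

    if (padlen < 0) {                   /* no room left for the length field */
        memset(p, 0x00, padlen + 8);    /* zero out the rest of this block */
        transform(buf);
        p = buf;
        padlen = 56;                    /* a fresh block holds only padding + length */
    }

    memset(p, 0, padlen);
    for (i = 0; i < 8; i++)             /* big-endian length in the last 8 bytes */
        buf[56 + i] = (uint8_t)(bits >> (56 - 8 * i));
    transform(buf);
}
```
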
/linux-6.6.21/drivers/net/usb/

gl620a.c:
    138  int padlen;    in genelink_tx_fixup() local
    146  padlen = ((skb->len + (4 + 4*1)) % 64) ? 0 : 1;    in genelink_tx_fixup()
    149  && ((headroom + tailroom) >= (padlen + (4 + 4*1)))) {    in genelink_tx_fixup()
    150  if ((headroom < (4 + 4*1)) || (tailroom < padlen)) {    in genelink_tx_fixup()
    157  skb2 = skb_copy_expand(skb, (4 + 4*1) , padlen, flags);    in genelink_tx_fixup()

cdc_eem.c:
    95  int padlen = 0;    in eem_tx_fixup() local
    104  padlen += 2;    in eem_tx_fixup()
    110  if ((tailroom >= ETH_FCS_LEN + padlen) &&    in eem_tx_fixup()
    115  > (EEM_HEAD + ETH_FCS_LEN + padlen)) {    in eem_tx_fixup()
    125  skb2 = skb_copy_expand(skb, EEM_HEAD, ETH_FCS_LEN + padlen, flags);    in eem_tx_fixup()
    148  if (padlen)    in eem_tx_fixup()

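eem_tx_fixup() appends the Ethernet FCS and, when the EEM header plus frame plus FCS would land exactly on a multiple of the bulk endpoint's max packet size, two extra bytes for a zero-length EEM packet, so the transfer ends with a short packet instead of relying on a ZLP. A minimal sketch of just that length rule, with the sizes passed in explicitly:

```c
/*
 * Hypothetical length rule modeled on eem_tx_fixup(): if the EEM header,
 * the Ethernet frame and its 4-byte FCS would exactly fill a whole number
 * of USB packets, reserve two extra bytes for a zero-length EEM packet so
 * the transfer ends with a short packet.
 */
static unsigned int eem_sentinel_padlen(unsigned int frame_len,
                                        unsigned int eem_head,
                                        unsigned int fcs_len,
                                        unsigned int maxpacket)
{
    return ((frame_len + eem_head + fcs_len) % maxpacket) ? 0 : 2;
}
```

The gadget-side counterpart f_eem.c further down applies the same two-byte rule.
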
zaurus.c:
    47  int padlen;    in zaurus_tx_fixup() local
    50  padlen = 2;    in zaurus_tx_fixup()
    53  if ((padlen + 4) <= tailroom)    in zaurus_tx_fixup()
    56  skb2 = skb_copy_expand(skb, 0, 4 + padlen, flags);    in zaurus_tx_fixup()

kalmia.c:
    164  u8 remainder, padlen = 0;    in kalmia_tx_fixup() local
    211  padlen = KALMIA_ALIGN_SIZE - remainder;    in kalmia_tx_fixup()
    212  skb_put_zero(skb, padlen);    in kalmia_tx_fixup()
    217  content_len, padlen, header_start);    in kalmia_tx_fixup()

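kalmia_tx_fixup() rounds the frame up to the device's alignment unit and zero-fills the gap with skb_put_zero(). A sketch of the length calculation; ALIGN_SIZE is a stand-in for KALMIA_ALIGN_SIZE, whose value is not shown in the matches above:

```c
#define ALIGN_SIZE 8    /* assumption for this sketch; the driver defines KALMIA_ALIGN_SIZE itself */

/*
 * Zero-padding needed to round a frame length up to the alignment unit,
 * as in kalmia_tx_fixup(); the driver then calls skb_put_zero(skb, padlen).
 */
static unsigned int align_padlen(unsigned int len)
{
    unsigned int remainder = len % ALIGN_SIZE;

    return remainder ? ALIGN_SIZE - remainder : 0;
}
```
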
net1080.c:
    419  int padlen = sizeof (struct nc_trailer);    in net1080_tx_fixup() local
    422  if (!((len + padlen + sizeof (struct nc_header)) & 0x01))    in net1080_tx_fixup()
    423  padlen++;    in net1080_tx_fixup()
    428  if (padlen <= tailroom &&    in net1080_tx_fixup()
    433  if ((sizeof (struct nc_header) + padlen) <    in net1080_tx_fixup()
    447  padlen,    in net1080_tx_fixup()

asix_common.c:
    268  int padlen;    in asix_tx_fixup() local
    275  padlen = ((skb->len + 4) & (dev->maxpacket - 1)) ? 0 : 4;    in asix_tx_fixup()
    290  !(padlen && skb_cloned(skb)) &&    in asix_tx_fixup()
    291  headroom + tailroom >= 4 + padlen) {    in asix_tx_fixup()
    294  tailroom < padlen) {    in asix_tx_fixup()
    301  skb2 = skb_copy_expand(skb, 4, padlen, flags);    in asix_tx_fixup()
    312  if (padlen) {    in asix_tx_fixup()

sr9800.c:
    117  int padlen;    in sr_tx_fixup() local
    120  padlen = ((skb->len + 4) % (dev->maxpacket - 1)) ? 0 : 4;    in sr_tx_fixup()
    122  if ((!skb_cloned(skb)) && ((headroom + tailroom) >= (4 + padlen))) {    in sr_tx_fixup()
    123  if ((headroom < 4) || (tailroom < padlen)) {    in sr_tx_fixup()
    130  skb2 = skb_copy_expand(skb, 4, padlen, flags);    in sr_tx_fixup()
    141  if (padlen) {    in sr_tx_fixup()

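gl620a.c, asix_common.c and sr9800.c above share one idea: the driver wraps the frame in a small header (and, for gl620a, a trailer), and if the resulting transfer length would be an exact multiple of the endpoint's max packet size it adds a few pad bytes so the transfer is terminated by a short packet rather than a zero-length packet. A sketch of the asix-style rule (4-byte length header, 4 pad bytes); gl620a pads by a single byte with its own header size. The in-tree asix code masks with (dev->maxpacket - 1), which assumes a power-of-two packet size; plain modulo is used here for clarity:

```c
/*
 * Hypothetical check modeled on asix_tx_fixup()/sr_tx_fixup(): the driver
 * prepends a 4-byte length header, and if the resulting URB would be an
 * exact multiple of the bulk endpoint's max packet size it appends 4 pad
 * bytes so the transfer is closed by a short packet.
 */
static unsigned int usbnet_tx_padlen(unsigned int skb_len,
                                     unsigned int header_len,
                                     unsigned int maxpacket)
{
    return ((skb_len + header_len) % maxpacket) ? 0 : 4;
}
```
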
/linux-6.6.21/scripts/

checkstack.pl:
    147  my $padlen = 56 - length($intro);
    148  while ($padlen > 0) {
    150  $padlen -= 8;

/linux-6.6.21/net/ipv6/

mip6.c:
    31  static inline void *mip6_padn(__u8 *data, __u8 padlen)    in mip6_padn() argument
    35  if (padlen == 1) {    in mip6_padn()
    37  } else if (padlen > 1) {    in mip6_padn()
    39  data[1] = padlen - 2;    in mip6_padn()
    40  if (padlen > 2)    in mip6_padn()
    43  return data + padlen;    in mip6_padn()

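mip6_padn() emits standard IPv6 option padding: a single byte of padding is a Pad1 option, anything longer becomes a PadN option whose length field counts only the zero bytes following the two-byte TLV header. A self-contained sketch of the same scheme:

```c
#include <stdint.h>
#include <string.h>

#define TLV_PAD1 0    /* one lone padding byte (RFC 8200) */
#define TLV_PADN 1    /* type, length, then that many zero bytes */

/*
 * Sketch of the Pad1/PadN scheme used by mip6_padn(): one byte of padding
 * is a bare Pad1 option, anything longer is a PadN option whose length
 * field counts only the zero bytes after the two-byte TLV header.
 */
static uint8_t *write_ipv6_padding(uint8_t *data, uint8_t padlen)
{
    if (padlen == 1) {
        data[0] = TLV_PAD1;
    } else if (padlen > 1) {
        data[0] = TLV_PADN;
        data[1] = padlen - 2;
        if (padlen > 2)
            memset(data + 2, 0, padlen - 2);
    }
    return data + padlen;    /* caller keeps writing after the pad */
}
```
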
/linux-6.6.21/fs/orangefs/

dir.c:
    194  __u32 *len, padlen;    in fill_from_part() local
    218  padlen = (sizeof *len + *len + 1) +    in fill_from_part()
    220  if (part->len < i + padlen + sizeof *khandle)    in fill_from_part()
    225  khandle = (void *)part + offset + i + padlen;    in fill_from_part()
    230  i += padlen + sizeof *khandle;    in fill_from_part()

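fill_from_part() steps through directory records that appear to consist of a 32-bit name length, the name, a NUL terminator and padding out to an 8-byte boundary, with the khandle stored right after the pad. A hedged sketch of how such a padded record length could be computed; the exact roundup in the kernel source is truncated in the match above, so this is an assumption based on the visible (sizeof *len + *len + 1) term:

```c
#include <stdint.h>

/*
 * Hedged sketch: padded length of a record made of a 32-bit name length,
 * the name bytes and a NUL terminator, rounded up to an 8-byte boundary so
 * that the following khandle is aligned. The (8 - raw % 8) % 8 roundup is
 * an assumption; the exact expression is cut off in the listing above.
 */
static uint32_t padded_name_record_len(uint32_t name_len)
{
    uint32_t raw = (uint32_t)sizeof(uint32_t) + name_len + 1;   /* length + name + NUL */

    return raw + (8 - raw % 8) % 8;
}
```
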
/linux-6.6.21/scripts/dtc/

flattree.c:
    355  int padlen = 0;    in dt_to_blob() local
    377  padlen = minsize - fdt32_to_cpu(fdt.totalsize);    in dt_to_blob()
    378  if (padlen < 0) {    in dt_to_blob()
    379  padlen = 0;    in dt_to_blob()
    388  padlen = padsize;    in dt_to_blob()
    391  padlen = ALIGN(fdt32_to_cpu(fdt.totalsize) + padlen, alignsize)    in dt_to_blob()
    394  if (padlen > 0) {    in dt_to_blob()
    396  tsize += padlen;    in dt_to_blob()
    415  if (padlen > 0)    in dt_to_blob()
    416  blob = data_append_zeroes(blob, padlen);    in dt_to_blob()

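dt_to_blob() sizes its trailing padding in three steps: grow the blob to a requested minimum size (clamping at zero if it is already larger), or by an explicit pad size, then round the total up to an alignment, and finally append that many zero bytes. A sketch of the arithmetic, assuming a power-of-two alignment as dtc's ALIGN() macro does:

```c
#include <stddef.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((size_t)(a) - 1))  /* a must be a power of two */

/*
 * Sketch of how dt_to_blob() sizes its trailing padding: a zero minsize,
 * padsize or alignsize means the option was not requested. The returned
 * number of zero bytes is appended after the flattened tree.
 */
static size_t blob_padlen(size_t totalsize, size_t minsize,
                          size_t padsize, size_t alignsize)
{
    size_t padlen = 0;

    if (minsize)
        padlen = (minsize > totalsize) ? minsize - totalsize : 0;
    if (padsize)
        padlen = padsize;
    if (alignsize)
        padlen = ALIGN_UP(totalsize + padlen, alignsize) - totalsize;

    return padlen;
}
```
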
/linux-6.6.21/drivers/crypto/

atmel-aes-regs.h:
    81  #define AES_EMR_PADLEN(padlen) (((padlen) << AES_EMR_PADLEN_OFFSET) &\    argument

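AES_EMR_PADLEN() is a shift-and-mask field constructor: the value is shifted to its bit position and clipped by the field mask (the mask itself is cut off in the match above). A generic sketch of the pattern with hypothetical FIELD_* names and values, since the real offset and width are not shown here:

```c
#include <stdint.h>

/* Hypothetical field description; the real AES_EMR_PADLEN offset and mask
 * are not visible in the match above. */
#define FIELD_OFFSET    8
#define FIELD_MASK      (0xfu << FIELD_OFFSET)

/* Shift the value into place and clip it to the field, as the macro does. */
#define FIELD_PREP(val) (((uint32_t)(val) << FIELD_OFFSET) & FIELD_MASK)

/* Typical use: reg = (reg & ~FIELD_MASK) | FIELD_PREP(pad_bytes); */
```
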
padlock-sha.c:
    332  unsigned int partial, padlen;    in padlock_sha1_final_nano() local
    340  padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);    in padlock_sha1_final_nano()
    341  padlock_sha1_update_nano(desc, padding, padlen);    in padlock_sha1_final_nano()
    417  unsigned int partial, padlen;    in padlock_sha256_final_nano() local
    425  padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);    in padlock_sha256_final_nano()
    426  padlock_sha256_update_nano(desc, padding, padlen);    in padlock_sha256_final_nano()

/linux-6.6.21/crypto/

chacha20poly1305.c:
    204  unsigned int padlen;    in poly_cipherpad() local
    207  padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE;    in poly_cipherpad()
    209  sg_init_one(preq->src, preq->pad, padlen);    in poly_cipherpad()
    214  ahash_request_set_crypt(&preq->req, preq->src, NULL, padlen);    in poly_cipherpad()
    263  unsigned int padlen;    in poly_adpad() local
    266  padlen = -rctx->assoclen % POLY1305_BLOCK_SIZE;    in poly_adpad()
    268  sg_init_one(preq->src, preq->pad, padlen);    in poly_adpad()
    273  ahash_request_set_crypt(&preq->req, preq->src, NULL, padlen);    in poly_adpad()

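poly_cipherpad() and poly_adpad() compute the number of zero bytes needed to bring the ciphertext or the associated data up to a 16-byte Poly1305 block boundary using the unsigned-wraparound idiom -len % 16, which yields 0 when the length is already aligned. A small self-checking sketch of why that works:

```c
#include <assert.h>
#include <stdio.h>

#define BLOCK_SIZE 16u    /* POLY1305_BLOCK_SIZE */

/*
 * For an unsigned len, -len % 16 is the number of bytes needed to reach the
 * next 16-byte boundary (0 if already aligned). The shortcut is valid
 * because the block size is a power of two and therefore divides
 * UINT_MAX + 1, so the unsigned wraparound preserves arithmetic modulo 16.
 */
static unsigned int poly_padlen(unsigned int len)
{
    return -len % BLOCK_SIZE;
}

int main(void)
{
    assert(poly_padlen(0) == 0);
    assert(poly_padlen(1) == 15);
    assert(poly_padlen(16) == 0);
    assert(poly_padlen(17) == 15);
    printf("padding lengths check out\n");
    return 0;
}
```
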
/linux-6.6.21/drivers/usb/gadget/function/

f_eem.c:
    343  int headroom, tailroom, padlen = 0;    in eem_wrap() local
    357  padlen += 2;    in eem_wrap()
    359  if ((tailroom >= (ETH_FCS_LEN + padlen)) &&    in eem_wrap()
    363  skb2 = skb_copy_expand(skb, EEM_HLEN, ETH_FCS_LEN + padlen, GFP_ATOMIC);    in eem_wrap()
    382  if (padlen)    in eem_wrap()

/linux-6.6.21/drivers/crypto/aspeed/

aspeed-hace-hash.c:
    80  unsigned int index, padlen;    in aspeed_ahash_fill_padding() local
    91  padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);    in aspeed_ahash_fill_padding()
    93  memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1);    in aspeed_ahash_fill_padding()
    94  memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 8);    in aspeed_ahash_fill_padding()
    95  rctx->bufcnt += padlen + 8;    in aspeed_ahash_fill_padding()
    102  padlen = (index < 112) ? (112 - index) : ((128 + 112) - index);    in aspeed_ahash_fill_padding()
    104  memset(rctx->buffer + rctx->bufcnt + 1, 0, padlen - 1);    in aspeed_ahash_fill_padding()
    105  memcpy(rctx->buffer + rctx->bufcnt + padlen, bits, 16);    in aspeed_ahash_fill_padding()
    106  rctx->bufcnt += padlen + 16;    in aspeed_ahash_fill_padding()

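aspeed_ahash_fill_padding() builds the padding directly in the request buffer: 0x80, zeros up to the length slot, then the total bit count in big-endian form (8 bytes for 64-byte-block digests, 16 bytes for the 128-byte-block ones). A sketch of the 64-byte-block case, assuming the caller has reserved at least 72 spare bytes past bufcnt:

```c
#include <stdint.h>
#include <string.h>

/*
 * Sketch of the 64-byte-block case of aspeed_ahash_fill_padding(): append
 * 0x80, zero-fill up to the length slot, then store the total bit count
 * big-endian. buf needs at least 72 spare bytes past bufcnt; the new
 * buffer length is returned.
 */
static unsigned int fill_padding64(uint8_t *buf, unsigned int bufcnt,
                                   uint64_t total_bytes)
{
    unsigned int index = bufcnt & 0x3f;
    unsigned int padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);
    uint64_t bits = total_bytes << 3;
    int i;

    buf[bufcnt] = 0x80;                      /* first padding byte */
    memset(buf + bufcnt + 1, 0, padlen - 1); /* zeros up to the length slot */
    for (i = 0; i < 8; i++)                  /* 64-bit big-endian bit count */
        buf[bufcnt + padlen + i] = (uint8_t)(bits >> (56 - 8 * i));
    return bufcnt + padlen + 8;
}
```
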
/linux-6.6.21/net/xfrm/

xfrm_output.c:
    253  ph->padlen = 4 - (optlen & 4);    in xfrm4_beet_encap_add()
    256  if (ph->padlen)    in xfrm4_beet_encap_add()
    257  memset(ph + 1, IPOPT_NOP, ph->padlen);    in xfrm4_beet_encap_add()
    380  ph->padlen = 4 - (optlen & 4);    in xfrm6_beet_encap_add()
    383  if (ph->padlen)    in xfrm6_beet_encap_add()
    384  memset(ph + 1, IPOPT_NOP, ph->padlen);    in xfrm6_beet_encap_add()

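xfrm{4,6}_beet_encap_add() pads the 4-byte BEET pseudo header plus any copied IP options out to an 8-byte multiple, filling the gap with IPOPT_NOP bytes; since optlen is a multiple of 4, 4 - (optlen & 4) comes out as either 0 or 4. The sketch below spells out that reading; the invariant comment is an assumption, not taken from the kernel source:

```c
#include <stdint.h>
#include <string.h>

#define IPOPT_NOP 1    /* IPv4 "no operation" option byte */

/*
 * Sketch of the BEET pseudo-header padding: the 4-byte pseudo header plus
 * the copied IP options must cover a whole number of 8-byte units, and
 * because optlen is a multiple of 4 the gap is either 0 or 4 bytes.
 */
static uint8_t beet_phdr_padlen(unsigned int optlen)
{
    uint8_t padlen = 4 - (optlen & 4);    /* 4 if optlen % 8 == 0, else 0 */

    /* assumed invariant: (4 + padlen + optlen) % 8 == 0 */
    return padlen;
}

static void write_beet_padding(uint8_t *after_phdr, uint8_t padlen)
{
    if (padlen)
        memset(after_phdr, IPOPT_NOP, padlen);    /* harmless filler bytes */
}
```
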