1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha1.h>
22 #include <crypto/sha2.h>
23 #include <crypto/sm3.h>
24 #include <crypto/sm4.h>
25 #include <crypto/xts.h>
26 #include <crypto/skcipher.h>
27 #include <crypto/internal/aead.h>
28 #include <crypto/internal/skcipher.h>
29
30 #include "safexcel.h"
31
32 enum safexcel_cipher_direction {
33 SAFEXCEL_ENCRYPT,
34 SAFEXCEL_DECRYPT,
35 };
36
37 enum safexcel_cipher_alg {
38 SAFEXCEL_DES,
39 SAFEXCEL_3DES,
40 SAFEXCEL_AES,
41 SAFEXCEL_CHACHA20,
42 SAFEXCEL_SM4,
43 };
44
45 struct safexcel_cipher_ctx {
46 struct safexcel_context base;
47 struct safexcel_crypto_priv *priv;
48
49 u32 mode;
50 enum safexcel_cipher_alg alg;
51 u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
52 u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
53 u8 aadskip;
54 u8 blocksz;
55 u32 ivmask;
56 u32 ctrinit;
57
58 __le32 key[16];
59 u32 nonce;
60 unsigned int key_len, xts;
61
62 /* All the below is AEAD specific */
63 u32 hash_alg;
64 u32 state_sz;
65
66 struct crypto_aead *fback;
67 };
68
69 struct safexcel_cipher_req {
70 enum safexcel_cipher_direction direction;
71 /* Number of result descriptors associated to the request */
72 unsigned int rdescs;
73 bool needs_inv;
74 int nr_src, nr_dst;
75 };
76
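/*
 * Load the IV (or nonce + counter for CTR and ChaCha20 modes) into the
 * command descriptor token area; returns the number of 32-bit words used.
 */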
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
78 struct safexcel_command_desc *cdesc)
79 {
80 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
81 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
82 /* 32 bit nonce */
83 cdesc->control_data.token[0] = ctx->nonce;
84 /* 64 bit IV part */
85 memcpy(&cdesc->control_data.token[1], iv, 8);
86 /* 32 bit counter, start at 0 or 1 (big endian!) */
87 cdesc->control_data.token[3] =
88 (__force u32)cpu_to_be32(ctx->ctrinit);
89 return 4;
90 }
91 if (ctx->alg == SAFEXCEL_CHACHA20) {
92 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
93 /* 96 bit nonce part */
94 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
95 /* 32 bit counter */
96 cdesc->control_data.token[3] = *(u32 *)iv;
97 return 4;
98 }
99
100 cdesc->control_data.options |= ctx->ivmask;
101 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
102 return ctx->blocksz / sizeof(u32);
103 }
104
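/*
 * Build the processing token for a plain skcipher request: load the IV and
 * add a single DIRECTION instruction covering 'length' bytes of payload.
 */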
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
106 struct safexcel_command_desc *cdesc,
107 struct safexcel_token *atoken,
108 u32 length)
109 {
110 struct safexcel_token *token;
111 int ivlen;
112
113 ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
114 if (ivlen == 4) {
115 /* No space in cdesc, instruction moves to atoken */
116 cdesc->additional_cdata_size = 1;
117 token = atoken;
118 } else {
119 /* Everything fits in cdesc */
120 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
121 /* Need to pad with NOP */
122 eip197_noop_token(&token[1]);
123 }
124
125 token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
126 token->packet_length = length;
127 token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
128 EIP197_TOKEN_STAT_LAST_HASH;
129 token->instructions = EIP197_TOKEN_INS_LAST |
130 EIP197_TOKEN_INS_TYPE_CRYPTO |
131 EIP197_TOKEN_INS_TYPE_OUTPUT;
132 }
133
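/*
 * Load the AEAD IV: nonce + IV + counter for CTR/ESP modes, 96-bit IV plus
 * counter for GCM and ChaCha20, or a full cipher block for CBC.
 */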
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
135 struct safexcel_command_desc *cdesc)
136 {
137 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
138 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
139 /* 32 bit nonce */
140 cdesc->control_data.token[0] = ctx->nonce;
141 /* 64 bit IV part */
142 memcpy(&cdesc->control_data.token[1], iv, 8);
143 /* 32 bit counter, start at 0 or 1 (big endian!) */
144 cdesc->control_data.token[3] =
145 (__force u32)cpu_to_be32(ctx->ctrinit);
146 return;
147 }
148 if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
149 /* 96 bit IV part */
150 memcpy(&cdesc->control_data.token[0], iv, 12);
151 /* 32 bit counter, start at 0 or 1 (big endian!) */
152 cdesc->control_data.token[3] =
153 (__force u32)cpu_to_be32(ctx->ctrinit);
154 return;
155 }
156 /* CBC */
157 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
158 }
159
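/*
 * Build the AEAD processing token: IV setup (including the CCM B0 block),
 * AAD hashing, optional ESP IV skip or CCM padding, the crypto payload and
 * finally ICV insertion (encrypt) or retrieval plus verification (decrypt).
 */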
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
161 struct safexcel_command_desc *cdesc,
162 struct safexcel_token *atoken,
163 enum safexcel_cipher_direction direction,
164 u32 cryptlen, u32 assoclen, u32 digestsize)
165 {
166 struct safexcel_token *aadref;
167 int atoksize = 2; /* Start with minimum size */
168 int assocadj = assoclen - ctx->aadskip, aadalign;
169
170 /* Always 4 dwords of embedded IV for AEAD modes */
171 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
172
173 if (direction == SAFEXCEL_DECRYPT)
174 cryptlen -= digestsize;
175
176 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
177 /* Construct IV block B0 for the CBC-MAC */
178 u8 *final_iv = (u8 *)cdesc->control_data.token;
179 u8 *cbcmaciv = (u8 *)&atoken[1];
180 __le32 *aadlen = (__le32 *)&atoken[5];
181
182 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
183 /* Length + nonce */
184 cdesc->control_data.token[0] = ctx->nonce;
185 /* Fixup flags byte */
186 *(__le32 *)cbcmaciv =
187 cpu_to_le32(ctx->nonce |
188 ((assocadj > 0) << 6) |
189 ((digestsize - 2) << 2));
190 /* 64 bit IV part */
191 memcpy(&cdesc->control_data.token[1], iv, 8);
192 memcpy(cbcmaciv + 4, iv, 8);
193 /* Start counter at 0 */
194 cdesc->control_data.token[3] = 0;
195 /* Message length */
196 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
197 } else {
198 /* Variable length IV part */
199 memcpy(final_iv, iv, 15 - iv[0]);
200 memcpy(cbcmaciv, iv, 15 - iv[0]);
201 /* Start variable length counter at 0 */
202 memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
203 memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
204 /* fixup flags byte */
205 cbcmaciv[0] |= ((assocadj > 0) << 6) |
206 ((digestsize - 2) << 2);
207 /* insert lower 2 bytes of message length */
208 cbcmaciv[14] = cryptlen >> 8;
209 cbcmaciv[15] = cryptlen & 255;
210 }
211
212 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
213 atoken->packet_length = AES_BLOCK_SIZE +
214 ((assocadj > 0) << 1);
215 atoken->stat = 0;
216 atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
217 EIP197_TOKEN_INS_TYPE_HASH;
218
219 if (likely(assocadj)) {
220 *aadlen = cpu_to_le32((assocadj >> 8) |
221 (assocadj & 255) << 8);
222 atoken += 6;
223 atoksize += 7;
224 } else {
225 atoken += 5;
226 atoksize += 6;
227 }
228
229 /* Process AAD data */
230 aadref = atoken;
231 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
232 atoken->packet_length = assocadj;
233 atoken->stat = 0;
234 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
235 atoken++;
236
237 /* For CCM only, align AAD data towards hash engine */
238 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
239 aadalign = (assocadj + 2) & 15;
240 atoken->packet_length = assocadj && aadalign ?
241 16 - aadalign :
242 0;
243 if (likely(cryptlen)) {
244 atoken->stat = 0;
245 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
246 } else {
247 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
248 atoken->instructions = EIP197_TOKEN_INS_LAST |
249 EIP197_TOKEN_INS_TYPE_HASH;
250 }
251 } else {
252 safexcel_aead_iv(ctx, iv, cdesc);
253
254 /* Process AAD data */
255 aadref = atoken;
256 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
257 atoken->packet_length = assocadj;
258 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
259 atoken->instructions = EIP197_TOKEN_INS_LAST |
260 EIP197_TOKEN_INS_TYPE_HASH;
261 }
262 atoken++;
263
264 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
265 /* For ESP mode (and not GMAC), skip over the IV */
266 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
267 atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
268 atoken->stat = 0;
269 atoken->instructions = 0;
270 atoken++;
271 atoksize++;
272 } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
273 direction == SAFEXCEL_DECRYPT)) {
274 /* Poly-chacha decryption needs a dummy NOP here ... */
275 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
276 atoken->packet_length = 16; /* According to Op Manual */
277 atoken->stat = 0;
278 atoken->instructions = 0;
279 atoken++;
280 atoksize++;
281 }
282
283 if (ctx->xcm) {
284 /* For GCM and CCM, obtain enc(Y0) */
285 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
286 atoken->packet_length = 0;
287 atoken->stat = 0;
288 atoken->instructions = AES_BLOCK_SIZE;
289 atoken++;
290
291 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
292 atoken->packet_length = AES_BLOCK_SIZE;
293 atoken->stat = 0;
294 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
295 EIP197_TOKEN_INS_TYPE_CRYPTO;
296 atoken++;
297 atoksize += 2;
298 }
299
300 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
301 /* Fixup stat field for AAD direction instruction */
302 aadref->stat = 0;
303
304 /* Process crypto data */
305 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
306 atoken->packet_length = cryptlen;
307
308 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
309 /* Fixup instruction field for AAD dir instruction */
310 aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
311
312 /* Do not send to crypt engine in case of GMAC */
313 atoken->instructions = EIP197_TOKEN_INS_LAST |
314 EIP197_TOKEN_INS_TYPE_HASH |
315 EIP197_TOKEN_INS_TYPE_OUTPUT;
316 } else {
317 atoken->instructions = EIP197_TOKEN_INS_LAST |
318 EIP197_TOKEN_INS_TYPE_CRYPTO |
319 EIP197_TOKEN_INS_TYPE_HASH |
320 EIP197_TOKEN_INS_TYPE_OUTPUT;
321 }
322
323 cryptlen &= 15;
324 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
325 atoken->stat = 0;
326 /* For CCM only, pad crypto data to the hash engine */
327 atoken++;
328 atoksize++;
329 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
330 atoken->packet_length = 16 - cryptlen;
331 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
332 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
333 } else {
334 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
335 }
336 atoken++;
337 atoksize++;
338 }
339
340 if (direction == SAFEXCEL_ENCRYPT) {
341 /* Append ICV */
342 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
343 atoken->packet_length = digestsize;
344 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
345 EIP197_TOKEN_STAT_LAST_PACKET;
346 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
347 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
348 } else {
349 /* Extract ICV */
350 atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
351 atoken->packet_length = digestsize;
352 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
353 EIP197_TOKEN_STAT_LAST_PACKET;
354 atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
355 atoken++;
356 atoksize++;
357
358 /* Verify ICV */
359 atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
360 atoken->packet_length = digestsize |
361 EIP197_TOKEN_HASH_RESULT_VERIFY;
362 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
363 EIP197_TOKEN_STAT_LAST_PACKET;
364 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
365 }
366
367 /* Fixup length of the token in the command descriptor */
368 cdesc->additional_cdata_size = atoksize;
369 }
370
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
372 const u8 *key, unsigned int len)
373 {
374 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
375 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
376 struct safexcel_crypto_priv *priv = ctx->base.priv;
377 struct crypto_aes_ctx aes;
378 int ret, i;
379
380 ret = aes_expandkey(&aes, key, len);
381 if (ret)
382 return ret;
383
384 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
385 for (i = 0; i < len / sizeof(u32); i++) {
386 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
387 ctx->base.needs_inv = true;
388 break;
389 }
390 }
391 }
392
393 for (i = 0; i < len / sizeof(u32); i++)
394 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
395
396 ctx->key_len = len;
397
398 memzero_explicit(&aes, sizeof(aes));
399 return 0;
400 }
401
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403 unsigned int len)
404 {
405 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
406 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
407 struct safexcel_crypto_priv *priv = ctx->base.priv;
408 struct crypto_authenc_keys keys;
409 struct crypto_aes_ctx aes;
410 int err = -EINVAL, i;
411 const char *alg;
412
413 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
414 goto badkey;
415
416 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417 /* Must have at least space for the nonce here */
418 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419 goto badkey;
420 /* last 4 bytes of key are the nonce! */
421 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422 CTR_RFC3686_NONCE_SIZE);
423 /* exclude the nonce here */
424 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425 }
426
427 /* Encryption key */
428 switch (ctx->alg) {
429 case SAFEXCEL_DES:
430 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431 if (unlikely(err))
432 goto badkey;
433 break;
434 case SAFEXCEL_3DES:
435 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436 if (unlikely(err))
437 goto badkey;
438 break;
439 case SAFEXCEL_AES:
440 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441 if (unlikely(err))
442 goto badkey;
443 break;
444 case SAFEXCEL_SM4:
445 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446 goto badkey;
447 break;
448 default:
449 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450 goto badkey;
451 }
452
453 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455 if (le32_to_cpu(ctx->key[i]) !=
456 ((u32 *)keys.enckey)[i]) {
457 ctx->base.needs_inv = true;
458 break;
459 }
460 }
461 }
462
463 /* Auth key */
464 switch (ctx->hash_alg) {
465 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
466 alg = "safexcel-sha1";
467 break;
468 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
469 alg = "safexcel-sha224";
470 break;
471 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
472 alg = "safexcel-sha256";
473 break;
474 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
475 alg = "safexcel-sha384";
476 break;
477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
478 alg = "safexcel-sha512";
479 break;
480 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481 alg = "safexcel-sm3";
482 break;
483 default:
484 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
485 goto badkey;
486 }
487
488 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489 alg, ctx->state_sz))
490 goto badkey;
491
492 /* Now copy the keys into the context */
493 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
495 ctx->key_len = keys.enckeylen;
496
497 memzero_explicit(&keys, sizeof(keys));
498 return 0;
499
500 badkey:
501 memzero_explicit(&keys, sizeof(keys));
502 return err;
503 }
504
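/*
 * Fill in the context control words of the first command descriptor:
 * operation type and direction, cipher and hash algorithm, key enable and
 * context record size.
 */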
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
506 struct crypto_async_request *async,
507 struct safexcel_cipher_req *sreq,
508 struct safexcel_command_desc *cdesc)
509 {
510 struct safexcel_crypto_priv *priv = ctx->base.priv;
511 int ctrl_size = ctx->key_len / sizeof(u32);
512
513 cdesc->control_data.control1 = ctx->mode;
514
515 if (ctx->aead) {
516 /* Take in account the ipad+opad digests */
517 if (ctx->xcm) {
518 ctrl_size += ctx->state_sz / sizeof(u32);
519 cdesc->control_data.control0 =
520 CONTEXT_CONTROL_KEY_EN |
521 CONTEXT_CONTROL_DIGEST_XCM |
522 ctx->hash_alg |
523 CONTEXT_CONTROL_SIZE(ctrl_size);
524 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
525 /* Chacha20-Poly1305 */
526 cdesc->control_data.control0 =
527 CONTEXT_CONTROL_KEY_EN |
528 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
529 (sreq->direction == SAFEXCEL_ENCRYPT ?
530 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
531 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
532 ctx->hash_alg |
533 CONTEXT_CONTROL_SIZE(ctrl_size);
534 return 0;
535 } else {
536 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
537 cdesc->control_data.control0 =
538 CONTEXT_CONTROL_KEY_EN |
539 CONTEXT_CONTROL_DIGEST_HMAC |
540 ctx->hash_alg |
541 CONTEXT_CONTROL_SIZE(ctrl_size);
542 }
543
544 if (sreq->direction == SAFEXCEL_ENCRYPT &&
545 (ctx->xcm == EIP197_XCM_MODE_CCM ||
546 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
547 cdesc->control_data.control0 |=
548 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
549 else if (sreq->direction == SAFEXCEL_ENCRYPT)
550 cdesc->control_data.control0 |=
551 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
552 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
553 cdesc->control_data.control0 |=
554 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
555 else
556 cdesc->control_data.control0 |=
557 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
558 } else {
559 if (sreq->direction == SAFEXCEL_ENCRYPT)
560 cdesc->control_data.control0 =
561 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
562 CONTEXT_CONTROL_KEY_EN |
563 CONTEXT_CONTROL_SIZE(ctrl_size);
564 else
565 cdesc->control_data.control0 =
566 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
567 CONTEXT_CONTROL_KEY_EN |
568 CONTEXT_CONTROL_SIZE(ctrl_size);
569 }
570
571 if (ctx->alg == SAFEXCEL_DES) {
572 cdesc->control_data.control0 |=
573 CONTEXT_CONTROL_CRYPTO_ALG_DES;
574 } else if (ctx->alg == SAFEXCEL_3DES) {
575 cdesc->control_data.control0 |=
576 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
577 } else if (ctx->alg == SAFEXCEL_AES) {
578 switch (ctx->key_len >> ctx->xts) {
579 case AES_KEYSIZE_128:
580 cdesc->control_data.control0 |=
581 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
582 break;
583 case AES_KEYSIZE_192:
584 cdesc->control_data.control0 |=
585 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
586 break;
587 case AES_KEYSIZE_256:
588 cdesc->control_data.control0 |=
589 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
590 break;
591 default:
592 dev_err(priv->dev, "aes keysize not supported: %u\n",
593 ctx->key_len >> ctx->xts);
594 return -EINVAL;
595 }
596 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
597 cdesc->control_data.control0 |=
598 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
599 } else if (ctx->alg == SAFEXCEL_SM4) {
600 cdesc->control_data.control0 |=
601 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
602 }
603
604 return 0;
605 }
606
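/*
 * Process the result descriptors of a completed cipher request: collect any
 * error status, unmap the DMA buffers and, for CBC encryption, copy the last
 * output block back into the request IV.
 */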
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
608 struct crypto_async_request *async,
609 struct scatterlist *src,
610 struct scatterlist *dst,
611 unsigned int cryptlen,
612 struct safexcel_cipher_req *sreq,
613 bool *should_complete, int *ret)
614 {
615 struct skcipher_request *areq = skcipher_request_cast(async);
616 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
617 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
618 struct safexcel_result_desc *rdesc;
619 int ndesc = 0;
620
621 *ret = 0;
622
623 if (unlikely(!sreq->rdescs))
624 return 0;
625
626 while (sreq->rdescs--) {
627 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
628 if (IS_ERR(rdesc)) {
629 dev_err(priv->dev,
630 "cipher: result: could not retrieve the result descriptor\n");
631 *ret = PTR_ERR(rdesc);
632 break;
633 }
634
635 if (likely(!*ret))
636 *ret = safexcel_rdesc_check_errors(priv, rdesc);
637
638 ndesc++;
639 }
640
641 safexcel_complete(priv, ring);
642
643 if (src == dst) {
644 if (sreq->nr_src > 0)
645 dma_unmap_sg(priv->dev, src, sreq->nr_src,
646 DMA_BIDIRECTIONAL);
647 } else {
648 if (sreq->nr_src > 0)
649 dma_unmap_sg(priv->dev, src, sreq->nr_src,
650 DMA_TO_DEVICE);
651 if (sreq->nr_dst > 0)
652 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
653 DMA_FROM_DEVICE);
654 }
655
656 /*
657 * Update IV in req from last crypto output word for CBC modes
658 */
659 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
660 (sreq->direction == SAFEXCEL_ENCRYPT)) {
661 /* For encrypt take the last output word */
662 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
663 crypto_skcipher_ivsize(skcipher),
664 (cryptlen -
665 crypto_skcipher_ivsize(skcipher)));
666 }
667
668 *should_complete = true;
669
670 return ndesc;
671 }
672
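/*
 * Map the source/destination buffers and build the command and result
 * descriptor chains for a single (AEAD or skcipher) request on this ring.
 * Zero-length input gets a dummy command descriptor and AAD-only AEAD output
 * a dummy result descriptor, as the engine cannot handle empty packets.
 */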
static int safexcel_send_req(struct crypto_async_request *base, int ring,
674 struct safexcel_cipher_req *sreq,
675 struct scatterlist *src, struct scatterlist *dst,
676 unsigned int cryptlen, unsigned int assoclen,
677 unsigned int digestsize, u8 *iv, int *commands,
678 int *results)
679 {
680 struct skcipher_request *areq = skcipher_request_cast(base);
681 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
682 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
683 struct safexcel_crypto_priv *priv = ctx->base.priv;
684 struct safexcel_command_desc *cdesc;
685 struct safexcel_command_desc *first_cdesc = NULL;
686 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
687 struct scatterlist *sg;
688 unsigned int totlen;
689 unsigned int totlen_src = cryptlen + assoclen;
690 unsigned int totlen_dst = totlen_src;
691 struct safexcel_token *atoken;
692 int n_cdesc = 0, n_rdesc = 0;
693 int queued, i, ret = 0;
694 bool first = true;
695
696 sreq->nr_src = sg_nents_for_len(src, totlen_src);
697
698 if (ctx->aead) {
699 /*
700 * AEAD has auth tag appended to output for encrypt and
701 * removed from the output for decrypt!
702 */
703 if (sreq->direction == SAFEXCEL_DECRYPT)
704 totlen_dst -= digestsize;
705 else
706 totlen_dst += digestsize;
707
708 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
709 &ctx->base.ipad, ctx->state_sz);
710 if (!ctx->xcm)
711 memcpy(ctx->base.ctxr->data + (ctx->key_len +
712 ctx->state_sz) / sizeof(u32), &ctx->base.opad,
713 ctx->state_sz);
714 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
715 (sreq->direction == SAFEXCEL_DECRYPT)) {
716 /*
717 * Save IV from last crypto input word for CBC modes in decrypt
718 * direction. Need to do this first in case of inplace operation
719 * as it will be overwritten.
720 */
721 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
722 crypto_skcipher_ivsize(skcipher),
723 (totlen_src -
724 crypto_skcipher_ivsize(skcipher)));
725 }
726
727 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
728
729 /*
730 * Remember actual input length, source buffer length may be
731 * updated in case of inline operation below.
732 */
733 totlen = totlen_src;
734 queued = totlen_src;
735
736 if (src == dst) {
737 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
738 sreq->nr_dst = sreq->nr_src;
739 if (unlikely((totlen_src || totlen_dst) &&
740 (sreq->nr_src <= 0))) {
741 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
742 max(totlen_src, totlen_dst));
743 return -EINVAL;
744 }
745 if (sreq->nr_src > 0)
746 dma_map_sg(priv->dev, src, sreq->nr_src,
747 DMA_BIDIRECTIONAL);
748 } else {
749 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
750 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
751 totlen_src);
752 return -EINVAL;
753 }
754
755 if (sreq->nr_src > 0)
756 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
757
758 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
759 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
760 totlen_dst);
761 ret = -EINVAL;
762 goto unmap;
763 }
764
765 if (sreq->nr_dst > 0)
766 dma_map_sg(priv->dev, dst, sreq->nr_dst,
767 DMA_FROM_DEVICE);
768 }
769
770 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
771
772 if (!totlen) {
773 /*
774 * The EIP97 cannot deal with zero length input packets!
775 * So stuff a dummy command descriptor indicating a 1 byte
776 * (dummy) input packet, using the context record as source.
777 */
778 first_cdesc = safexcel_add_cdesc(priv, ring,
779 1, 1, ctx->base.ctxr_dma,
780 1, 1, ctx->base.ctxr_dma,
781 &atoken);
782 if (IS_ERR(first_cdesc)) {
783 /* No space left in the command descriptor ring */
784 ret = PTR_ERR(first_cdesc);
785 goto cdesc_rollback;
786 }
787 n_cdesc = 1;
788 goto skip_cdesc;
789 }
790
791 /* command descriptors */
792 for_each_sg(src, sg, sreq->nr_src, i) {
793 int len = sg_dma_len(sg);
794
795 /* Do not overflow the request */
796 if (queued < len)
797 len = queued;
798
799 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
800 !(queued - len),
801 sg_dma_address(sg), len, totlen,
802 ctx->base.ctxr_dma, &atoken);
803 if (IS_ERR(cdesc)) {
804 /* No space left in the command descriptor ring */
805 ret = PTR_ERR(cdesc);
806 goto cdesc_rollback;
807 }
808
809 if (!n_cdesc)
810 first_cdesc = cdesc;
811
812 n_cdesc++;
813 queued -= len;
814 if (!queued)
815 break;
816 }
817 skip_cdesc:
818 /* Add context control words and token to first command descriptor */
819 safexcel_context_control(ctx, base, sreq, first_cdesc);
820 if (ctx->aead)
821 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
822 sreq->direction, cryptlen,
823 assoclen, digestsize);
824 else
825 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
826 cryptlen);
827
828 /* result descriptors */
829 for_each_sg(dst, sg, sreq->nr_dst, i) {
830 bool last = (i == sreq->nr_dst - 1);
831 u32 len = sg_dma_len(sg);
832
833 /* only allow the part of the buffer we know we need */
834 if (len > totlen_dst)
835 len = totlen_dst;
836 if (unlikely(!len))
837 break;
838 totlen_dst -= len;
839
840 /* skip over AAD space in buffer - not written */
841 if (assoclen) {
842 if (assoclen >= len) {
843 assoclen -= len;
844 continue;
845 }
846 rdesc = safexcel_add_rdesc(priv, ring, first, last,
847 sg_dma_address(sg) +
848 assoclen,
849 len - assoclen);
850 assoclen = 0;
851 } else {
852 rdesc = safexcel_add_rdesc(priv, ring, first, last,
853 sg_dma_address(sg),
854 len);
855 }
856 if (IS_ERR(rdesc)) {
857 /* No space left in the result descriptor ring */
858 ret = PTR_ERR(rdesc);
859 goto rdesc_rollback;
860 }
861 if (first) {
862 first_rdesc = rdesc;
863 first = false;
864 }
865 n_rdesc++;
866 }
867
868 if (unlikely(first)) {
869 /*
870 * Special case: AEAD decrypt with only AAD data.
871 * In this case there is NO output data from the engine,
872 * but the engine still needs a result descriptor!
873 * Create a dummy one just for catching the result token.
874 */
875 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
876 if (IS_ERR(rdesc)) {
877 /* No space left in the result descriptor ring */
878 ret = PTR_ERR(rdesc);
879 goto rdesc_rollback;
880 }
881 first_rdesc = rdesc;
882 n_rdesc = 1;
883 }
884
885 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
886
887 *commands = n_cdesc;
888 *results = n_rdesc;
889 return 0;
890
891 rdesc_rollback:
892 for (i = 0; i < n_rdesc; i++)
893 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
894 cdesc_rollback:
895 for (i = 0; i < n_cdesc; i++)
896 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
897 unmap:
898 if (src == dst) {
899 if (sreq->nr_src > 0)
900 dma_unmap_sg(priv->dev, src, sreq->nr_src,
901 DMA_BIDIRECTIONAL);
902 } else {
903 if (sreq->nr_src > 0)
904 dma_unmap_sg(priv->dev, src, sreq->nr_src,
905 DMA_TO_DEVICE);
906 if (sreq->nr_dst > 0)
907 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
908 DMA_FROM_DEVICE);
909 }
910
911 return ret;
912 }
913
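/*
 * Handle the result of a context invalidation: free the context record when
 * tearing down the transform, otherwise re-queue the original request on a
 * fresh ring.
 */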
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
915 int ring,
916 struct crypto_async_request *base,
917 struct safexcel_cipher_req *sreq,
918 bool *should_complete, int *ret)
919 {
920 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
921 struct safexcel_result_desc *rdesc;
922 int ndesc = 0, enq_ret;
923
924 *ret = 0;
925
926 if (unlikely(!sreq->rdescs))
927 return 0;
928
929 while (sreq->rdescs--) {
930 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
931 if (IS_ERR(rdesc)) {
932 dev_err(priv->dev,
933 "cipher: invalidate: could not retrieve the result descriptor\n");
934 *ret = PTR_ERR(rdesc);
935 break;
936 }
937
938 if (likely(!*ret))
939 *ret = safexcel_rdesc_check_errors(priv, rdesc);
940
941 ndesc++;
942 }
943
944 safexcel_complete(priv, ring);
945
946 if (ctx->base.exit_inv) {
947 dma_pool_free(priv->context_pool, ctx->base.ctxr,
948 ctx->base.ctxr_dma);
949
950 *should_complete = true;
951
952 return ndesc;
953 }
954
955 ring = safexcel_select_ring(priv);
956 ctx->base.ring = ring;
957
958 spin_lock_bh(&priv->ring[ring].queue_lock);
959 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
960 spin_unlock_bh(&priv->ring[ring].queue_lock);
961
962 if (enq_ret != -EINPROGRESS)
963 *ret = enq_ret;
964
965 queue_work(priv->ring[ring].workqueue,
966 &priv->ring[ring].work_data.work);
967
968 *should_complete = false;
969
970 return ndesc;
971 }
972
static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
974 int ring,
975 struct crypto_async_request *async,
976 bool *should_complete, int *ret)
977 {
978 struct skcipher_request *req = skcipher_request_cast(async);
979 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
980 int err;
981
982 if (sreq->needs_inv) {
983 sreq->needs_inv = false;
984 err = safexcel_handle_inv_result(priv, ring, async, sreq,
985 should_complete, ret);
986 } else {
987 err = safexcel_handle_req_result(priv, ring, async, req->src,
988 req->dst, req->cryptlen, sreq,
989 should_complete, ret);
990 }
991
992 return err;
993 }
994
static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
996 int ring,
997 struct crypto_async_request *async,
998 bool *should_complete, int *ret)
999 {
1000 struct aead_request *req = aead_request_cast(async);
1001 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1002 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1003 int err;
1004
1005 if (sreq->needs_inv) {
1006 sreq->needs_inv = false;
1007 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1008 should_complete, ret);
1009 } else {
1010 err = safexcel_handle_req_result(priv, ring, async, req->src,
1011 req->dst,
1012 req->cryptlen + crypto_aead_authsize(tfm),
1013 sreq, should_complete, ret);
1014 }
1015
1016 return err;
1017 }
1018
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1020 int ring, int *commands, int *results)
1021 {
1022 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1023 struct safexcel_crypto_priv *priv = ctx->base.priv;
1024 int ret;
1025
1026 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1027 if (unlikely(ret))
1028 return ret;
1029
1030 *commands = 1;
1031 *results = 1;
1032
1033 return 0;
1034 }
1035
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1037 int *commands, int *results)
1038 {
1039 struct skcipher_request *req = skcipher_request_cast(async);
1040 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1041 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1042 struct safexcel_crypto_priv *priv = ctx->base.priv;
1043 int ret;
1044
1045 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1046
1047 if (sreq->needs_inv) {
1048 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1049 } else {
1050 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1051 u8 input_iv[AES_BLOCK_SIZE];
1052
1053 /*
1054 * Save input IV in case of CBC decrypt mode
1055 * Will be overwritten with output IV prior to use!
1056 */
1057 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1058
1059 ret = safexcel_send_req(async, ring, sreq, req->src,
1060 req->dst, req->cryptlen, 0, 0, input_iv,
1061 commands, results);
1062 }
1063
1064 sreq->rdescs = *results;
1065 return ret;
1066 }
1067
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1069 int *commands, int *results)
1070 {
1071 struct aead_request *req = aead_request_cast(async);
1072 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1073 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1074 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1075 struct safexcel_crypto_priv *priv = ctx->base.priv;
1076 int ret;
1077
1078 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1079
1080 if (sreq->needs_inv)
1081 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1082 else
1083 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1084 req->cryptlen, req->assoclen,
1085 crypto_aead_authsize(tfm), req->iv,
1086 commands, results);
1087 sreq->rdescs = *results;
1088 return ret;
1089 }
1090
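/*
 * Synchronously invalidate the context record: queue an invalidation request
 * and wait for it to complete.
 */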
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1092 struct crypto_async_request *base,
1093 struct safexcel_cipher_req *sreq,
1094 struct safexcel_inv_result *result)
1095 {
1096 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1097 struct safexcel_crypto_priv *priv = ctx->base.priv;
1098 int ring = ctx->base.ring;
1099
1100 init_completion(&result->completion);
1101
1102 ctx = crypto_tfm_ctx(base->tfm);
1103 ctx->base.exit_inv = true;
1104 sreq->needs_inv = true;
1105
1106 spin_lock_bh(&priv->ring[ring].queue_lock);
1107 crypto_enqueue_request(&priv->ring[ring].queue, base);
1108 spin_unlock_bh(&priv->ring[ring].queue_lock);
1109
1110 queue_work(priv->ring[ring].workqueue,
1111 &priv->ring[ring].work_data.work);
1112
1113 wait_for_completion(&result->completion);
1114
1115 if (result->error) {
1116 dev_warn(priv->dev,
1117 "cipher: sync: invalidate: completion error %d\n",
1118 result->error);
1119 return result->error;
1120 }
1121
1122 return 0;
1123 }
1124
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1126 {
1127 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1128 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1129 struct safexcel_inv_result result = {};
1130
1131 memset(req, 0, sizeof(struct skcipher_request));
1132
1133 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1134 safexcel_inv_complete, &result);
1135 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1136
1137 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1138 }
1139
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1141 {
1142 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1143 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1144 struct safexcel_inv_result result = {};
1145
1146 memset(req, 0, sizeof(struct aead_request));
1147
1148 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1149 safexcel_inv_complete, &result);
1150 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1151
1152 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1153 }
1154
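/*
 * Queue a cipher request on its ring, allocating the per-transform context
 * record on first use or flagging it for invalidation if the key changed.
 */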
static int safexcel_queue_req(struct crypto_async_request *base,
1156 struct safexcel_cipher_req *sreq,
1157 enum safexcel_cipher_direction dir)
1158 {
1159 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1160 struct safexcel_crypto_priv *priv = ctx->base.priv;
1161 int ret, ring;
1162
1163 sreq->needs_inv = false;
1164 sreq->direction = dir;
1165
1166 if (ctx->base.ctxr) {
1167 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1168 sreq->needs_inv = true;
1169 ctx->base.needs_inv = false;
1170 }
1171 } else {
1172 ctx->base.ring = safexcel_select_ring(priv);
1173 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1174 EIP197_GFP_FLAGS(*base),
1175 &ctx->base.ctxr_dma);
1176 if (!ctx->base.ctxr)
1177 return -ENOMEM;
1178 }
1179
1180 ring = ctx->base.ring;
1181
1182 spin_lock_bh(&priv->ring[ring].queue_lock);
1183 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1184 spin_unlock_bh(&priv->ring[ring].queue_lock);
1185
1186 queue_work(priv->ring[ring].workqueue,
1187 &priv->ring[ring].work_data.work);
1188
1189 return ret;
1190 }
1191
static int safexcel_encrypt(struct skcipher_request *req)
1193 {
1194 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1195 SAFEXCEL_ENCRYPT);
1196 }
1197
static int safexcel_decrypt(struct skcipher_request *req)
1199 {
1200 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201 SAFEXCEL_DECRYPT);
1202 }
1203
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1205 {
1206 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207 struct safexcel_alg_template *tmpl =
1208 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1209 alg.skcipher.base);
1210
1211 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1212 sizeof(struct safexcel_cipher_req));
1213
1214 ctx->base.priv = tmpl->priv;
1215
1216 ctx->base.send = safexcel_skcipher_send;
1217 ctx->base.handle_result = safexcel_skcipher_handle_result;
1218 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1219 ctx->ctrinit = 1;
1220 return 0;
1221 }
1222
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1224 {
1225 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1226
1227 memzero_explicit(ctx->key, sizeof(ctx->key));
1228
1229 /* context not allocated, skip invalidation */
1230 if (!ctx->base.ctxr)
1231 return -ENOMEM;
1232
1233 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1234 return 0;
1235 }
1236
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1238 {
1239 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1240 struct safexcel_crypto_priv *priv = ctx->base.priv;
1241 int ret;
1242
1243 if (safexcel_cipher_cra_exit(tfm))
1244 return;
1245
1246 if (priv->flags & EIP197_TRC_CACHE) {
1247 ret = safexcel_skcipher_exit_inv(tfm);
1248 if (ret)
1249 dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1250 ret);
1251 } else {
1252 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1253 ctx->base.ctxr_dma);
1254 }
1255 }
1256
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1258 {
1259 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1260 struct safexcel_crypto_priv *priv = ctx->base.priv;
1261 int ret;
1262
1263 if (safexcel_cipher_cra_exit(tfm))
1264 return;
1265
1266 if (priv->flags & EIP197_TRC_CACHE) {
1267 ret = safexcel_aead_exit_inv(tfm);
1268 if (ret)
1269 dev_warn(priv->dev, "aead: invalidation error %d\n",
1270 ret);
1271 } else {
1272 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1273 ctx->base.ctxr_dma);
1274 }
1275 }
1276
static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1278 {
1279 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1280
1281 safexcel_skcipher_cra_init(tfm);
1282 ctx->alg = SAFEXCEL_AES;
1283 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1284 ctx->blocksz = 0;
1285 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1286 return 0;
1287 }
1288
1289 struct safexcel_alg_template safexcel_alg_ecb_aes = {
1290 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1291 .algo_mask = SAFEXCEL_ALG_AES,
1292 .alg.skcipher = {
1293 .setkey = safexcel_skcipher_aes_setkey,
1294 .encrypt = safexcel_encrypt,
1295 .decrypt = safexcel_decrypt,
1296 .min_keysize = AES_MIN_KEY_SIZE,
1297 .max_keysize = AES_MAX_KEY_SIZE,
1298 .base = {
1299 .cra_name = "ecb(aes)",
1300 .cra_driver_name = "safexcel-ecb-aes",
1301 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1302 .cra_flags = CRYPTO_ALG_ASYNC |
1303 CRYPTO_ALG_ALLOCATES_MEMORY |
1304 CRYPTO_ALG_KERN_DRIVER_ONLY,
1305 .cra_blocksize = AES_BLOCK_SIZE,
1306 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1307 .cra_alignmask = 0,
1308 .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1309 .cra_exit = safexcel_skcipher_cra_exit,
1310 .cra_module = THIS_MODULE,
1311 },
1312 },
1313 };
1314
static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1316 {
1317 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1318
1319 safexcel_skcipher_cra_init(tfm);
1320 ctx->alg = SAFEXCEL_AES;
1321 ctx->blocksz = AES_BLOCK_SIZE;
1322 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1323 return 0;
1324 }
1325
1326 struct safexcel_alg_template safexcel_alg_cbc_aes = {
1327 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1328 .algo_mask = SAFEXCEL_ALG_AES,
1329 .alg.skcipher = {
1330 .setkey = safexcel_skcipher_aes_setkey,
1331 .encrypt = safexcel_encrypt,
1332 .decrypt = safexcel_decrypt,
1333 .min_keysize = AES_MIN_KEY_SIZE,
1334 .max_keysize = AES_MAX_KEY_SIZE,
1335 .ivsize = AES_BLOCK_SIZE,
1336 .base = {
1337 .cra_name = "cbc(aes)",
1338 .cra_driver_name = "safexcel-cbc-aes",
1339 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1340 .cra_flags = CRYPTO_ALG_ASYNC |
1341 CRYPTO_ALG_ALLOCATES_MEMORY |
1342 CRYPTO_ALG_KERN_DRIVER_ONLY,
1343 .cra_blocksize = AES_BLOCK_SIZE,
1344 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1345 .cra_alignmask = 0,
1346 .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1347 .cra_exit = safexcel_skcipher_cra_exit,
1348 .cra_module = THIS_MODULE,
1349 },
1350 },
1351 };
1352
static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1354 {
1355 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1356
1357 safexcel_skcipher_cra_init(tfm);
1358 ctx->alg = SAFEXCEL_AES;
1359 ctx->blocksz = AES_BLOCK_SIZE;
1360 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1361 return 0;
1362 }
1363
1364 struct safexcel_alg_template safexcel_alg_cfb_aes = {
1365 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1366 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1367 .alg.skcipher = {
1368 .setkey = safexcel_skcipher_aes_setkey,
1369 .encrypt = safexcel_encrypt,
1370 .decrypt = safexcel_decrypt,
1371 .min_keysize = AES_MIN_KEY_SIZE,
1372 .max_keysize = AES_MAX_KEY_SIZE,
1373 .ivsize = AES_BLOCK_SIZE,
1374 .base = {
1375 .cra_name = "cfb(aes)",
1376 .cra_driver_name = "safexcel-cfb-aes",
1377 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1378 .cra_flags = CRYPTO_ALG_ASYNC |
1379 CRYPTO_ALG_ALLOCATES_MEMORY |
1380 CRYPTO_ALG_KERN_DRIVER_ONLY,
1381 .cra_blocksize = 1,
1382 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1383 .cra_alignmask = 0,
1384 .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1385 .cra_exit = safexcel_skcipher_cra_exit,
1386 .cra_module = THIS_MODULE,
1387 },
1388 },
1389 };
1390
static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1392 {
1393 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395 safexcel_skcipher_cra_init(tfm);
1396 ctx->alg = SAFEXCEL_AES;
1397 ctx->blocksz = AES_BLOCK_SIZE;
1398 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1399 return 0;
1400 }
1401
1402 struct safexcel_alg_template safexcel_alg_ofb_aes = {
1403 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1404 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1405 .alg.skcipher = {
1406 .setkey = safexcel_skcipher_aes_setkey,
1407 .encrypt = safexcel_encrypt,
1408 .decrypt = safexcel_decrypt,
1409 .min_keysize = AES_MIN_KEY_SIZE,
1410 .max_keysize = AES_MAX_KEY_SIZE,
1411 .ivsize = AES_BLOCK_SIZE,
1412 .base = {
1413 .cra_name = "ofb(aes)",
1414 .cra_driver_name = "safexcel-ofb-aes",
1415 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1416 .cra_flags = CRYPTO_ALG_ASYNC |
1417 CRYPTO_ALG_ALLOCATES_MEMORY |
1418 CRYPTO_ALG_KERN_DRIVER_ONLY,
1419 .cra_blocksize = 1,
1420 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1421 .cra_alignmask = 0,
1422 .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1423 .cra_exit = safexcel_skcipher_cra_exit,
1424 .cra_module = THIS_MODULE,
1425 },
1426 },
1427 };
1428
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1430 const u8 *key, unsigned int len)
1431 {
1432 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1433 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1434 struct safexcel_crypto_priv *priv = ctx->base.priv;
1435 struct crypto_aes_ctx aes;
1436 int ret, i;
1437 unsigned int keylen;
1438
1439 /* last 4 bytes of key are the nonce! */
1440 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1441 /* exclude the nonce here */
1442 keylen = len - CTR_RFC3686_NONCE_SIZE;
1443 ret = aes_expandkey(&aes, key, keylen);
1444 if (ret)
1445 return ret;
1446
1447 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1448 for (i = 0; i < keylen / sizeof(u32); i++) {
1449 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1450 ctx->base.needs_inv = true;
1451 break;
1452 }
1453 }
1454 }
1455
1456 for (i = 0; i < keylen / sizeof(u32); i++)
1457 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1458
1459 ctx->key_len = keylen;
1460
1461 memzero_explicit(&aes, sizeof(aes));
1462 return 0;
1463 }
1464
static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1466 {
1467 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1468
1469 safexcel_skcipher_cra_init(tfm);
1470 ctx->alg = SAFEXCEL_AES;
1471 ctx->blocksz = AES_BLOCK_SIZE;
1472 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1473 return 0;
1474 }
1475
1476 struct safexcel_alg_template safexcel_alg_ctr_aes = {
1477 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1478 .algo_mask = SAFEXCEL_ALG_AES,
1479 .alg.skcipher = {
1480 .setkey = safexcel_skcipher_aesctr_setkey,
1481 .encrypt = safexcel_encrypt,
1482 .decrypt = safexcel_decrypt,
1483 /* Add nonce size */
1484 .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1485 .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1486 .ivsize = CTR_RFC3686_IV_SIZE,
1487 .base = {
1488 .cra_name = "rfc3686(ctr(aes))",
1489 .cra_driver_name = "safexcel-ctr-aes",
1490 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1491 .cra_flags = CRYPTO_ALG_ASYNC |
1492 CRYPTO_ALG_ALLOCATES_MEMORY |
1493 CRYPTO_ALG_KERN_DRIVER_ONLY,
1494 .cra_blocksize = 1,
1495 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1496 .cra_alignmask = 0,
1497 .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1498 .cra_exit = safexcel_skcipher_cra_exit,
1499 .cra_module = THIS_MODULE,
1500 },
1501 },
1502 };
1503
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1505 unsigned int len)
1506 {
1507 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1508 struct safexcel_crypto_priv *priv = ctx->base.priv;
1509 int ret;
1510
1511 ret = verify_skcipher_des_key(ctfm, key);
1512 if (ret)
1513 return ret;
1514
	/* if context exists and key changed, need to invalidate it */
1516 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1517 if (memcmp(ctx->key, key, len))
1518 ctx->base.needs_inv = true;
1519
1520 memcpy(ctx->key, key, len);
1521 ctx->key_len = len;
1522
1523 return 0;
1524 }
1525
static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1527 {
1528 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1529
1530 safexcel_skcipher_cra_init(tfm);
1531 ctx->alg = SAFEXCEL_DES;
1532 ctx->blocksz = DES_BLOCK_SIZE;
1533 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1534 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1535 return 0;
1536 }
1537
1538 struct safexcel_alg_template safexcel_alg_cbc_des = {
1539 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1540 .algo_mask = SAFEXCEL_ALG_DES,
1541 .alg.skcipher = {
1542 .setkey = safexcel_des_setkey,
1543 .encrypt = safexcel_encrypt,
1544 .decrypt = safexcel_decrypt,
1545 .min_keysize = DES_KEY_SIZE,
1546 .max_keysize = DES_KEY_SIZE,
1547 .ivsize = DES_BLOCK_SIZE,
1548 .base = {
1549 .cra_name = "cbc(des)",
1550 .cra_driver_name = "safexcel-cbc-des",
1551 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1552 .cra_flags = CRYPTO_ALG_ASYNC |
1553 CRYPTO_ALG_ALLOCATES_MEMORY |
1554 CRYPTO_ALG_KERN_DRIVER_ONLY,
1555 .cra_blocksize = DES_BLOCK_SIZE,
1556 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1557 .cra_alignmask = 0,
1558 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1559 .cra_exit = safexcel_skcipher_cra_exit,
1560 .cra_module = THIS_MODULE,
1561 },
1562 },
1563 };
1564
static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1566 {
1567 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1568
1569 safexcel_skcipher_cra_init(tfm);
1570 ctx->alg = SAFEXCEL_DES;
1571 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1572 ctx->blocksz = 0;
1573 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1574 return 0;
1575 }
1576
1577 struct safexcel_alg_template safexcel_alg_ecb_des = {
1578 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1579 .algo_mask = SAFEXCEL_ALG_DES,
1580 .alg.skcipher = {
1581 .setkey = safexcel_des_setkey,
1582 .encrypt = safexcel_encrypt,
1583 .decrypt = safexcel_decrypt,
1584 .min_keysize = DES_KEY_SIZE,
1585 .max_keysize = DES_KEY_SIZE,
1586 .base = {
1587 .cra_name = "ecb(des)",
1588 .cra_driver_name = "safexcel-ecb-des",
1589 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1590 .cra_flags = CRYPTO_ALG_ASYNC |
1591 CRYPTO_ALG_ALLOCATES_MEMORY |
1592 CRYPTO_ALG_KERN_DRIVER_ONLY,
1593 .cra_blocksize = DES_BLOCK_SIZE,
1594 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1595 .cra_alignmask = 0,
1596 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1597 .cra_exit = safexcel_skcipher_cra_exit,
1598 .cra_module = THIS_MODULE,
1599 },
1600 },
1601 };
1602
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1604 const u8 *key, unsigned int len)
1605 {
1606 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1607 struct safexcel_crypto_priv *priv = ctx->base.priv;
1608 int err;
1609
1610 err = verify_skcipher_des3_key(ctfm, key);
1611 if (err)
1612 return err;
1613
	/* if context exists and key changed, need to invalidate it */
1615 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1616 if (memcmp(ctx->key, key, len))
1617 ctx->base.needs_inv = true;
1618
1619 memcpy(ctx->key, key, len);
1620 ctx->key_len = len;
1621
1622 return 0;
1623 }
1624
static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1626 {
1627 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1628
1629 safexcel_skcipher_cra_init(tfm);
1630 ctx->alg = SAFEXCEL_3DES;
1631 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1632 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1633 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1634 return 0;
1635 }
1636
1637 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1638 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1639 .algo_mask = SAFEXCEL_ALG_DES,
1640 .alg.skcipher = {
1641 .setkey = safexcel_des3_ede_setkey,
1642 .encrypt = safexcel_encrypt,
1643 .decrypt = safexcel_decrypt,
1644 .min_keysize = DES3_EDE_KEY_SIZE,
1645 .max_keysize = DES3_EDE_KEY_SIZE,
1646 .ivsize = DES3_EDE_BLOCK_SIZE,
1647 .base = {
1648 .cra_name = "cbc(des3_ede)",
1649 .cra_driver_name = "safexcel-cbc-des3_ede",
1650 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1651 .cra_flags = CRYPTO_ALG_ASYNC |
1652 CRYPTO_ALG_ALLOCATES_MEMORY |
1653 CRYPTO_ALG_KERN_DRIVER_ONLY,
1654 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1655 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1656 .cra_alignmask = 0,
1657 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1658 .cra_exit = safexcel_skcipher_cra_exit,
1659 .cra_module = THIS_MODULE,
1660 },
1661 },
1662 };
1663
static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1665 {
1666 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1667
1668 safexcel_skcipher_cra_init(tfm);
1669 ctx->alg = SAFEXCEL_3DES;
1670 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1671 ctx->blocksz = 0;
1672 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1673 return 0;
1674 }
1675
1676 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1677 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1678 .algo_mask = SAFEXCEL_ALG_DES,
1679 .alg.skcipher = {
1680 .setkey = safexcel_des3_ede_setkey,
1681 .encrypt = safexcel_encrypt,
1682 .decrypt = safexcel_decrypt,
1683 .min_keysize = DES3_EDE_KEY_SIZE,
1684 .max_keysize = DES3_EDE_KEY_SIZE,
1685 .base = {
1686 .cra_name = "ecb(des3_ede)",
1687 .cra_driver_name = "safexcel-ecb-des3_ede",
1688 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1689 .cra_flags = CRYPTO_ALG_ASYNC |
1690 CRYPTO_ALG_ALLOCATES_MEMORY |
1691 CRYPTO_ALG_KERN_DRIVER_ONLY,
1692 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1693 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1694 .cra_alignmask = 0,
1695 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1696 .cra_exit = safexcel_skcipher_cra_exit,
1697 .cra_module = THIS_MODULE,
1698 },
1699 },
1700 };
1701
static int safexcel_aead_encrypt(struct aead_request *req)
1703 {
1704 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1705
1706 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1707 }
1708
static int safexcel_aead_decrypt(struct aead_request *req)
1710 {
1711 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1712
1713 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1714 }
1715
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1717 {
1718 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1719 struct safexcel_alg_template *tmpl =
1720 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1721 alg.aead.base);
1722
1723 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1724 sizeof(struct safexcel_cipher_req));
1725
1726 ctx->base.priv = tmpl->priv;
1727
1728 ctx->alg = SAFEXCEL_AES; /* default */
1729 ctx->blocksz = AES_BLOCK_SIZE;
1730 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1731 ctx->ctrinit = 1;
1732 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1733 ctx->aead = true;
1734 ctx->base.send = safexcel_aead_send;
1735 ctx->base.handle_result = safexcel_aead_handle_result;
1736 return 0;
1737 }
1738
static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1740 {
1741 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1742
1743 safexcel_aead_cra_init(tfm);
1744 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1745 ctx->state_sz = SHA1_DIGEST_SIZE;
1746 return 0;
1747 }
1748
1749 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1750 .type = SAFEXCEL_ALG_TYPE_AEAD,
1751 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1752 .alg.aead = {
1753 .setkey = safexcel_aead_setkey,
1754 .encrypt = safexcel_aead_encrypt,
1755 .decrypt = safexcel_aead_decrypt,
1756 .ivsize = AES_BLOCK_SIZE,
1757 .maxauthsize = SHA1_DIGEST_SIZE,
1758 .base = {
1759 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1760 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1761 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1762 .cra_flags = CRYPTO_ALG_ASYNC |
1763 CRYPTO_ALG_ALLOCATES_MEMORY |
1764 CRYPTO_ALG_KERN_DRIVER_ONLY,
1765 .cra_blocksize = AES_BLOCK_SIZE,
1766 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1767 .cra_alignmask = 0,
1768 .cra_init = safexcel_aead_sha1_cra_init,
1769 .cra_exit = safexcel_aead_cra_exit,
1770 .cra_module = THIS_MODULE,
1771 },
1772 },
1773 };
1774
1775 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1776 {
1777 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1778
1779 safexcel_aead_cra_init(tfm);
1780 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1781 ctx->state_sz = SHA256_DIGEST_SIZE;
1782 return 0;
1783 }
1784
1785 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1786 .type = SAFEXCEL_ALG_TYPE_AEAD,
1787 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1788 .alg.aead = {
1789 .setkey = safexcel_aead_setkey,
1790 .encrypt = safexcel_aead_encrypt,
1791 .decrypt = safexcel_aead_decrypt,
1792 .ivsize = AES_BLOCK_SIZE,
1793 .maxauthsize = SHA256_DIGEST_SIZE,
1794 .base = {
1795 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1796 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1797 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1798 .cra_flags = CRYPTO_ALG_ASYNC |
1799 CRYPTO_ALG_ALLOCATES_MEMORY |
1800 CRYPTO_ALG_KERN_DRIVER_ONLY,
1801 .cra_blocksize = AES_BLOCK_SIZE,
1802 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1803 .cra_alignmask = 0,
1804 .cra_init = safexcel_aead_sha256_cra_init,
1805 .cra_exit = safexcel_aead_cra_exit,
1806 .cra_module = THIS_MODULE,
1807 },
1808 },
1809 };
1810
1811 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1812 {
1813 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1814
1815 safexcel_aead_cra_init(tfm);
1816 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1817 ctx->state_sz = SHA256_DIGEST_SIZE;
1818 return 0;
1819 }
1820
1821 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1822 .type = SAFEXCEL_ALG_TYPE_AEAD,
1823 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1824 .alg.aead = {
1825 .setkey = safexcel_aead_setkey,
1826 .encrypt = safexcel_aead_encrypt,
1827 .decrypt = safexcel_aead_decrypt,
1828 .ivsize = AES_BLOCK_SIZE,
1829 .maxauthsize = SHA224_DIGEST_SIZE,
1830 .base = {
1831 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1832 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1833 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1834 .cra_flags = CRYPTO_ALG_ASYNC |
1835 CRYPTO_ALG_ALLOCATES_MEMORY |
1836 CRYPTO_ALG_KERN_DRIVER_ONLY,
1837 .cra_blocksize = AES_BLOCK_SIZE,
1838 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1839 .cra_alignmask = 0,
1840 .cra_init = safexcel_aead_sha224_cra_init,
1841 .cra_exit = safexcel_aead_cra_exit,
1842 .cra_module = THIS_MODULE,
1843 },
1844 },
1845 };
1846
1847 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1848 {
1849 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1850
1851 safexcel_aead_cra_init(tfm);
1852 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1853 ctx->state_sz = SHA512_DIGEST_SIZE;
1854 return 0;
1855 }
1856
1857 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1858 .type = SAFEXCEL_ALG_TYPE_AEAD,
1859 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1860 .alg.aead = {
1861 .setkey = safexcel_aead_setkey,
1862 .encrypt = safexcel_aead_encrypt,
1863 .decrypt = safexcel_aead_decrypt,
1864 .ivsize = AES_BLOCK_SIZE,
1865 .maxauthsize = SHA512_DIGEST_SIZE,
1866 .base = {
1867 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1868 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1869 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1870 .cra_flags = CRYPTO_ALG_ASYNC |
1871 CRYPTO_ALG_ALLOCATES_MEMORY |
1872 CRYPTO_ALG_KERN_DRIVER_ONLY,
1873 .cra_blocksize = AES_BLOCK_SIZE,
1874 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1875 .cra_alignmask = 0,
1876 .cra_init = safexcel_aead_sha512_cra_init,
1877 .cra_exit = safexcel_aead_cra_exit,
1878 .cra_module = THIS_MODULE,
1879 },
1880 },
1881 };
1882
1883 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1884 {
1885 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1886
1887 safexcel_aead_cra_init(tfm);
1888 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1889 ctx->state_sz = SHA512_DIGEST_SIZE;
1890 return 0;
1891 }
1892
1893 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1894 .type = SAFEXCEL_ALG_TYPE_AEAD,
1895 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1896 .alg.aead = {
1897 .setkey = safexcel_aead_setkey,
1898 .encrypt = safexcel_aead_encrypt,
1899 .decrypt = safexcel_aead_decrypt,
1900 .ivsize = AES_BLOCK_SIZE,
1901 .maxauthsize = SHA384_DIGEST_SIZE,
1902 .base = {
1903 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1904 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1905 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1906 .cra_flags = CRYPTO_ALG_ASYNC |
1907 CRYPTO_ALG_ALLOCATES_MEMORY |
1908 CRYPTO_ALG_KERN_DRIVER_ONLY,
1909 .cra_blocksize = AES_BLOCK_SIZE,
1910 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1911 .cra_alignmask = 0,
1912 .cra_init = safexcel_aead_sha384_cra_init,
1913 .cra_exit = safexcel_aead_cra_exit,
1914 .cra_module = THIS_MODULE,
1915 },
1916 },
1917 };
1918
1919 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1920 {
1921 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923 safexcel_aead_sha1_cra_init(tfm);
1924 ctx->alg = SAFEXCEL_3DES; /* override default */
1925 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927 return 0;
1928 }
1929
1930 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1931 .type = SAFEXCEL_ALG_TYPE_AEAD,
1932 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1933 .alg.aead = {
1934 .setkey = safexcel_aead_setkey,
1935 .encrypt = safexcel_aead_encrypt,
1936 .decrypt = safexcel_aead_decrypt,
1937 .ivsize = DES3_EDE_BLOCK_SIZE,
1938 .maxauthsize = SHA1_DIGEST_SIZE,
1939 .base = {
1940 .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1941 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1942 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1943 .cra_flags = CRYPTO_ALG_ASYNC |
1944 CRYPTO_ALG_ALLOCATES_MEMORY |
1945 CRYPTO_ALG_KERN_DRIVER_ONLY,
1946 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1947 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1948 .cra_alignmask = 0,
1949 .cra_init = safexcel_aead_sha1_des3_cra_init,
1950 .cra_exit = safexcel_aead_cra_exit,
1951 .cra_module = THIS_MODULE,
1952 },
1953 },
1954 };
1955
1956 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1957 {
1958 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1959
1960 safexcel_aead_sha256_cra_init(tfm);
1961 ctx->alg = SAFEXCEL_3DES; /* override default */
1962 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964 return 0;
1965 }
1966
1967 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1968 .type = SAFEXCEL_ALG_TYPE_AEAD,
1969 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1970 .alg.aead = {
1971 .setkey = safexcel_aead_setkey,
1972 .encrypt = safexcel_aead_encrypt,
1973 .decrypt = safexcel_aead_decrypt,
1974 .ivsize = DES3_EDE_BLOCK_SIZE,
1975 .maxauthsize = SHA256_DIGEST_SIZE,
1976 .base = {
1977 .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1978 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1979 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1980 .cra_flags = CRYPTO_ALG_ASYNC |
1981 CRYPTO_ALG_ALLOCATES_MEMORY |
1982 CRYPTO_ALG_KERN_DRIVER_ONLY,
1983 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1984 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1985 .cra_alignmask = 0,
1986 .cra_init = safexcel_aead_sha256_des3_cra_init,
1987 .cra_exit = safexcel_aead_cra_exit,
1988 .cra_module = THIS_MODULE,
1989 },
1990 },
1991 };
1992
1993 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1994 {
1995 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997 safexcel_aead_sha224_cra_init(tfm);
1998 ctx->alg = SAFEXCEL_3DES; /* override default */
1999 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001 return 0;
2002 }
2003
2004 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
2005 .type = SAFEXCEL_ALG_TYPE_AEAD,
2006 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2007 .alg.aead = {
2008 .setkey = safexcel_aead_setkey,
2009 .encrypt = safexcel_aead_encrypt,
2010 .decrypt = safexcel_aead_decrypt,
2011 .ivsize = DES3_EDE_BLOCK_SIZE,
2012 .maxauthsize = SHA224_DIGEST_SIZE,
2013 .base = {
2014 .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
2015 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
2016 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2017 .cra_flags = CRYPTO_ALG_ASYNC |
2018 CRYPTO_ALG_ALLOCATES_MEMORY |
2019 CRYPTO_ALG_KERN_DRIVER_ONLY,
2020 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2021 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2022 .cra_alignmask = 0,
2023 .cra_init = safexcel_aead_sha224_des3_cra_init,
2024 .cra_exit = safexcel_aead_cra_exit,
2025 .cra_module = THIS_MODULE,
2026 },
2027 },
2028 };
2029
2030 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2031 {
2032 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034 safexcel_aead_sha512_cra_init(tfm);
2035 ctx->alg = SAFEXCEL_3DES; /* override default */
2036 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2037 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038 return 0;
2039 }
2040
2041 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2042 .type = SAFEXCEL_ALG_TYPE_AEAD,
2043 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2044 .alg.aead = {
2045 .setkey = safexcel_aead_setkey,
2046 .encrypt = safexcel_aead_encrypt,
2047 .decrypt = safexcel_aead_decrypt,
2048 .ivsize = DES3_EDE_BLOCK_SIZE,
2049 .maxauthsize = SHA512_DIGEST_SIZE,
2050 .base = {
2051 .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2052 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2053 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2054 .cra_flags = CRYPTO_ALG_ASYNC |
2055 CRYPTO_ALG_ALLOCATES_MEMORY |
2056 CRYPTO_ALG_KERN_DRIVER_ONLY,
2057 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2058 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2059 .cra_alignmask = 0,
2060 .cra_init = safexcel_aead_sha512_des3_cra_init,
2061 .cra_exit = safexcel_aead_cra_exit,
2062 .cra_module = THIS_MODULE,
2063 },
2064 },
2065 };
2066
2067 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2068 {
2069 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071 safexcel_aead_sha384_cra_init(tfm);
2072 ctx->alg = SAFEXCEL_3DES; /* override default */
2073 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2074 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075 return 0;
2076 }
2077
2078 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2079 .type = SAFEXCEL_ALG_TYPE_AEAD,
2080 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2081 .alg.aead = {
2082 .setkey = safexcel_aead_setkey,
2083 .encrypt = safexcel_aead_encrypt,
2084 .decrypt = safexcel_aead_decrypt,
2085 .ivsize = DES3_EDE_BLOCK_SIZE,
2086 .maxauthsize = SHA384_DIGEST_SIZE,
2087 .base = {
2088 .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2089 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2090 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2091 .cra_flags = CRYPTO_ALG_ASYNC |
2092 CRYPTO_ALG_ALLOCATES_MEMORY |
2093 CRYPTO_ALG_KERN_DRIVER_ONLY,
2094 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2095 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2096 .cra_alignmask = 0,
2097 .cra_init = safexcel_aead_sha384_des3_cra_init,
2098 .cra_exit = safexcel_aead_cra_exit,
2099 .cra_module = THIS_MODULE,
2100 },
2101 },
2102 };
2103
2104 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2105 {
2106 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108 safexcel_aead_sha1_cra_init(tfm);
2109 ctx->alg = SAFEXCEL_DES; /* override default */
2110 ctx->blocksz = DES_BLOCK_SIZE;
2111 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112 return 0;
2113 }
2114
2115 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2116 .type = SAFEXCEL_ALG_TYPE_AEAD,
2117 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2118 .alg.aead = {
2119 .setkey = safexcel_aead_setkey,
2120 .encrypt = safexcel_aead_encrypt,
2121 .decrypt = safexcel_aead_decrypt,
2122 .ivsize = DES_BLOCK_SIZE,
2123 .maxauthsize = SHA1_DIGEST_SIZE,
2124 .base = {
2125 .cra_name = "authenc(hmac(sha1),cbc(des))",
2126 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2127 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2128 .cra_flags = CRYPTO_ALG_ASYNC |
2129 CRYPTO_ALG_ALLOCATES_MEMORY |
2130 CRYPTO_ALG_KERN_DRIVER_ONLY,
2131 .cra_blocksize = DES_BLOCK_SIZE,
2132 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2133 .cra_alignmask = 0,
2134 .cra_init = safexcel_aead_sha1_des_cra_init,
2135 .cra_exit = safexcel_aead_cra_exit,
2136 .cra_module = THIS_MODULE,
2137 },
2138 },
2139 };
2140
2141 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2142 {
2143 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145 safexcel_aead_sha256_cra_init(tfm);
2146 ctx->alg = SAFEXCEL_DES; /* override default */
2147 ctx->blocksz = DES_BLOCK_SIZE;
2148 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149 return 0;
2150 }
2151
2152 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2153 .type = SAFEXCEL_ALG_TYPE_AEAD,
2154 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2155 .alg.aead = {
2156 .setkey = safexcel_aead_setkey,
2157 .encrypt = safexcel_aead_encrypt,
2158 .decrypt = safexcel_aead_decrypt,
2159 .ivsize = DES_BLOCK_SIZE,
2160 .maxauthsize = SHA256_DIGEST_SIZE,
2161 .base = {
2162 .cra_name = "authenc(hmac(sha256),cbc(des))",
2163 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2164 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2165 .cra_flags = CRYPTO_ALG_ASYNC |
2166 CRYPTO_ALG_ALLOCATES_MEMORY |
2167 CRYPTO_ALG_KERN_DRIVER_ONLY,
2168 .cra_blocksize = DES_BLOCK_SIZE,
2169 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2170 .cra_alignmask = 0,
2171 .cra_init = safexcel_aead_sha256_des_cra_init,
2172 .cra_exit = safexcel_aead_cra_exit,
2173 .cra_module = THIS_MODULE,
2174 },
2175 },
2176 };
2177
2178 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2179 {
2180 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182 safexcel_aead_sha224_cra_init(tfm);
2183 ctx->alg = SAFEXCEL_DES; /* override default */
2184 ctx->blocksz = DES_BLOCK_SIZE;
2185 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186 return 0;
2187 }
2188
2189 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2190 .type = SAFEXCEL_ALG_TYPE_AEAD,
2191 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2192 .alg.aead = {
2193 .setkey = safexcel_aead_setkey,
2194 .encrypt = safexcel_aead_encrypt,
2195 .decrypt = safexcel_aead_decrypt,
2196 .ivsize = DES_BLOCK_SIZE,
2197 .maxauthsize = SHA224_DIGEST_SIZE,
2198 .base = {
2199 .cra_name = "authenc(hmac(sha224),cbc(des))",
2200 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2201 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2202 .cra_flags = CRYPTO_ALG_ASYNC |
2203 CRYPTO_ALG_ALLOCATES_MEMORY |
2204 CRYPTO_ALG_KERN_DRIVER_ONLY,
2205 .cra_blocksize = DES_BLOCK_SIZE,
2206 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2207 .cra_alignmask = 0,
2208 .cra_init = safexcel_aead_sha224_des_cra_init,
2209 .cra_exit = safexcel_aead_cra_exit,
2210 .cra_module = THIS_MODULE,
2211 },
2212 },
2213 };
2214
2215 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2216 {
2217 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219 safexcel_aead_sha512_cra_init(tfm);
2220 ctx->alg = SAFEXCEL_DES; /* override default */
2221 ctx->blocksz = DES_BLOCK_SIZE;
2222 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2223 return 0;
2224 }
2225
2226 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2227 .type = SAFEXCEL_ALG_TYPE_AEAD,
2228 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2229 .alg.aead = {
2230 .setkey = safexcel_aead_setkey,
2231 .encrypt = safexcel_aead_encrypt,
2232 .decrypt = safexcel_aead_decrypt,
2233 .ivsize = DES_BLOCK_SIZE,
2234 .maxauthsize = SHA512_DIGEST_SIZE,
2235 .base = {
2236 .cra_name = "authenc(hmac(sha512),cbc(des))",
2237 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2238 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2239 .cra_flags = CRYPTO_ALG_ASYNC |
2240 CRYPTO_ALG_ALLOCATES_MEMORY |
2241 CRYPTO_ALG_KERN_DRIVER_ONLY,
2242 .cra_blocksize = DES_BLOCK_SIZE,
2243 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2244 .cra_alignmask = 0,
2245 .cra_init = safexcel_aead_sha512_des_cra_init,
2246 .cra_exit = safexcel_aead_cra_exit,
2247 .cra_module = THIS_MODULE,
2248 },
2249 },
2250 };
2251
2252 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2253 {
2254 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2255
2256 safexcel_aead_sha384_cra_init(tfm);
2257 ctx->alg = SAFEXCEL_DES; /* override default */
2258 ctx->blocksz = DES_BLOCK_SIZE;
2259 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2260 return 0;
2261 }
2262
2263 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2264 .type = SAFEXCEL_ALG_TYPE_AEAD,
2265 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2266 .alg.aead = {
2267 .setkey = safexcel_aead_setkey,
2268 .encrypt = safexcel_aead_encrypt,
2269 .decrypt = safexcel_aead_decrypt,
2270 .ivsize = DES_BLOCK_SIZE,
2271 .maxauthsize = SHA384_DIGEST_SIZE,
2272 .base = {
2273 .cra_name = "authenc(hmac(sha384),cbc(des))",
2274 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2275 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2276 .cra_flags = CRYPTO_ALG_ASYNC |
2277 CRYPTO_ALG_ALLOCATES_MEMORY |
2278 CRYPTO_ALG_KERN_DRIVER_ONLY,
2279 .cra_blocksize = DES_BLOCK_SIZE,
2280 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2281 .cra_alignmask = 0,
2282 .cra_init = safexcel_aead_sha384_des_cra_init,
2283 .cra_exit = safexcel_aead_cra_exit,
2284 .cra_module = THIS_MODULE,
2285 },
2286 },
2287 };
2288
2289 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2290 {
2291 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2292
2293 safexcel_aead_sha1_cra_init(tfm);
2294 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2295 return 0;
2296 }
2297
2298 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2299 .type = SAFEXCEL_ALG_TYPE_AEAD,
2300 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2301 .alg.aead = {
2302 .setkey = safexcel_aead_setkey,
2303 .encrypt = safexcel_aead_encrypt,
2304 .decrypt = safexcel_aead_decrypt,
2305 .ivsize = CTR_RFC3686_IV_SIZE,
2306 .maxauthsize = SHA1_DIGEST_SIZE,
2307 .base = {
2308 .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2309 .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2310 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2311 .cra_flags = CRYPTO_ALG_ASYNC |
2312 CRYPTO_ALG_ALLOCATES_MEMORY |
2313 CRYPTO_ALG_KERN_DRIVER_ONLY,
2314 .cra_blocksize = 1,
2315 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2316 .cra_alignmask = 0,
2317 .cra_init = safexcel_aead_sha1_ctr_cra_init,
2318 .cra_exit = safexcel_aead_cra_exit,
2319 .cra_module = THIS_MODULE,
2320 },
2321 },
2322 };
2323
2324 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2325 {
2326 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2327
2328 safexcel_aead_sha256_cra_init(tfm);
2329 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2330 return 0;
2331 }
2332
2333 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2334 .type = SAFEXCEL_ALG_TYPE_AEAD,
2335 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2336 .alg.aead = {
2337 .setkey = safexcel_aead_setkey,
2338 .encrypt = safexcel_aead_encrypt,
2339 .decrypt = safexcel_aead_decrypt,
2340 .ivsize = CTR_RFC3686_IV_SIZE,
2341 .maxauthsize = SHA256_DIGEST_SIZE,
2342 .base = {
2343 .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2344 .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2345 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2346 .cra_flags = CRYPTO_ALG_ASYNC |
2347 CRYPTO_ALG_ALLOCATES_MEMORY |
2348 CRYPTO_ALG_KERN_DRIVER_ONLY,
2349 .cra_blocksize = 1,
2350 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2351 .cra_alignmask = 0,
2352 .cra_init = safexcel_aead_sha256_ctr_cra_init,
2353 .cra_exit = safexcel_aead_cra_exit,
2354 .cra_module = THIS_MODULE,
2355 },
2356 },
2357 };
2358
2359 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2360 {
2361 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2362
2363 safexcel_aead_sha224_cra_init(tfm);
2364 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2365 return 0;
2366 }
2367
2368 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2369 .type = SAFEXCEL_ALG_TYPE_AEAD,
2370 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2371 .alg.aead = {
2372 .setkey = safexcel_aead_setkey,
2373 .encrypt = safexcel_aead_encrypt,
2374 .decrypt = safexcel_aead_decrypt,
2375 .ivsize = CTR_RFC3686_IV_SIZE,
2376 .maxauthsize = SHA224_DIGEST_SIZE,
2377 .base = {
2378 .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2379 .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2380 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2381 .cra_flags = CRYPTO_ALG_ASYNC |
2382 CRYPTO_ALG_ALLOCATES_MEMORY |
2383 CRYPTO_ALG_KERN_DRIVER_ONLY,
2384 .cra_blocksize = 1,
2385 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2386 .cra_alignmask = 0,
2387 .cra_init = safexcel_aead_sha224_ctr_cra_init,
2388 .cra_exit = safexcel_aead_cra_exit,
2389 .cra_module = THIS_MODULE,
2390 },
2391 },
2392 };
2393
2394 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2395 {
2396 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2397
2398 safexcel_aead_sha512_cra_init(tfm);
2399 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2400 return 0;
2401 }
2402
2403 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2404 .type = SAFEXCEL_ALG_TYPE_AEAD,
2405 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2406 .alg.aead = {
2407 .setkey = safexcel_aead_setkey,
2408 .encrypt = safexcel_aead_encrypt,
2409 .decrypt = safexcel_aead_decrypt,
2410 .ivsize = CTR_RFC3686_IV_SIZE,
2411 .maxauthsize = SHA512_DIGEST_SIZE,
2412 .base = {
2413 .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2414 .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2415 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2416 .cra_flags = CRYPTO_ALG_ASYNC |
2417 CRYPTO_ALG_ALLOCATES_MEMORY |
2418 CRYPTO_ALG_KERN_DRIVER_ONLY,
2419 .cra_blocksize = 1,
2420 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2421 .cra_alignmask = 0,
2422 .cra_init = safexcel_aead_sha512_ctr_cra_init,
2423 .cra_exit = safexcel_aead_cra_exit,
2424 .cra_module = THIS_MODULE,
2425 },
2426 },
2427 };
2428
2429 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2430 {
2431 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2432
2433 safexcel_aead_sha384_cra_init(tfm);
2434 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2435 return 0;
2436 }
2437
2438 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2439 .type = SAFEXCEL_ALG_TYPE_AEAD,
2440 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2441 .alg.aead = {
2442 .setkey = safexcel_aead_setkey,
2443 .encrypt = safexcel_aead_encrypt,
2444 .decrypt = safexcel_aead_decrypt,
2445 .ivsize = CTR_RFC3686_IV_SIZE,
2446 .maxauthsize = SHA384_DIGEST_SIZE,
2447 .base = {
2448 .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2449 .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2450 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2451 .cra_flags = CRYPTO_ALG_ASYNC |
2452 CRYPTO_ALG_ALLOCATES_MEMORY |
2453 CRYPTO_ALG_KERN_DRIVER_ONLY,
2454 .cra_blocksize = 1,
2455 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2456 .cra_alignmask = 0,
2457 .cra_init = safexcel_aead_sha384_ctr_cra_init,
2458 .cra_exit = safexcel_aead_cra_exit,
2459 .cra_module = THIS_MODULE,
2460 },
2461 },
2462 };
2463
2464 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2465 const u8 *key, unsigned int len)
2466 {
2467 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2468 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2469 struct safexcel_crypto_priv *priv = ctx->base.priv;
2470 struct crypto_aes_ctx aes;
2471 int ret, i;
2472 unsigned int keylen;
2473
2474 /* Check for illegal XTS keys */
2475 ret = xts_verify_key(ctfm, key, len);
2476 if (ret)
2477 return ret;
2478
2479 /* Only half of the key data is cipher key */
2480 keylen = (len >> 1);
2481 ret = aes_expandkey(&aes, key, keylen);
2482 if (ret)
2483 return ret;
2484
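/*
 * With the engine's transform record cache in use on an already programmed
 * context, a changed key must invalidate the cached record, so compare
 * against the previously stored key words first.
 */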
2485 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2486 for (i = 0; i < keylen / sizeof(u32); i++) {
2487 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2488 ctx->base.needs_inv = true;
2489 break;
2490 }
2491 }
2492 }
2493
2494 for (i = 0; i < keylen / sizeof(u32); i++)
2495 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2496
2497 /* The other half is the tweak key */
2498 ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2499 if (ret)
2500 return ret;
2501
2502 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2503 for (i = 0; i < keylen / sizeof(u32); i++) {
2504 if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2505 aes.key_enc[i]) {
2506 ctx->base.needs_inv = true;
2507 break;
2508 }
2509 }
2510 }
2511
2512 for (i = 0; i < keylen / sizeof(u32); i++)
2513 ctx->key[i + keylen / sizeof(u32)] =
2514 cpu_to_le32(aes.key_enc[i]);
2515
2516 ctx->key_len = keylen << 1;
2517
2518 memzero_explicit(&aes, sizeof(aes));
2519 return 0;
2520 }
2521
2522 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2523 {
2524 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2525
2526 safexcel_skcipher_cra_init(tfm);
2527 ctx->alg = SAFEXCEL_AES;
2528 ctx->blocksz = AES_BLOCK_SIZE;
2529 ctx->xts = 1;
2530 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2531 return 0;
2532 }
2533
2534 static int safexcel_encrypt_xts(struct skcipher_request *req)
2535 {
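/* XTS needs at least one full block of input; reject shorter requests */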
2536 if (req->cryptlen < XTS_BLOCK_SIZE)
2537 return -EINVAL;
2538 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2539 SAFEXCEL_ENCRYPT);
2540 }
2541
2542 static int safexcel_decrypt_xts(struct skcipher_request *req)
2543 {
2544 if (req->cryptlen < XTS_BLOCK_SIZE)
2545 return -EINVAL;
2546 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2547 SAFEXCEL_DECRYPT);
2548 }
2549
2550 struct safexcel_alg_template safexcel_alg_xts_aes = {
2551 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2552 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2553 .alg.skcipher = {
2554 .setkey = safexcel_skcipher_aesxts_setkey,
2555 .encrypt = safexcel_encrypt_xts,
2556 .decrypt = safexcel_decrypt_xts,
2557 /* XTS actually uses 2 AES keys glued together */
2558 .min_keysize = AES_MIN_KEY_SIZE * 2,
2559 .max_keysize = AES_MAX_KEY_SIZE * 2,
2560 .ivsize = XTS_BLOCK_SIZE,
2561 .base = {
2562 .cra_name = "xts(aes)",
2563 .cra_driver_name = "safexcel-xts-aes",
2564 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2565 .cra_flags = CRYPTO_ALG_ASYNC |
2566 CRYPTO_ALG_ALLOCATES_MEMORY |
2567 CRYPTO_ALG_KERN_DRIVER_ONLY,
2568 .cra_blocksize = XTS_BLOCK_SIZE,
2569 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2570 .cra_alignmask = 0,
2571 .cra_init = safexcel_skcipher_aes_xts_cra_init,
2572 .cra_exit = safexcel_skcipher_cra_exit,
2573 .cra_module = THIS_MODULE,
2574 },
2575 },
2576 };
2577
2578 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2579 unsigned int len)
2580 {
2581 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2582 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2583 struct safexcel_crypto_priv *priv = ctx->base.priv;
2584 struct crypto_aes_ctx aes;
2585 u32 hashkey[AES_BLOCK_SIZE >> 2];
2586 int ret, i;
2587
2588 ret = aes_expandkey(&aes, key, len);
2589 if (ret) {
2590 memzero_explicit(&aes, sizeof(aes));
2591 return ret;
2592 }
2593
2594 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2595 for (i = 0; i < len / sizeof(u32); i++) {
2596 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2597 ctx->base.needs_inv = true;
2598 break;
2599 }
2600 }
2601 }
2602
2603 for (i = 0; i < len / sizeof(u32); i++)
2604 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2605
2606 ctx->key_len = len;
2607
2608 /* Compute hash key by encrypting zeroes with cipher key */
2609 memset(hashkey, 0, AES_BLOCK_SIZE);
2610 aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
2611
2612 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2613 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2614 if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2615 ctx->base.needs_inv = true;
2616 break;
2617 }
2618 }
2619 }
2620
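/* The computed GHASH hash key is kept big-endian in the context's ipad area */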
2621 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2622 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2623
2624 memzero_explicit(hashkey, AES_BLOCK_SIZE);
2625 memzero_explicit(&aes, sizeof(aes));
2626 return 0;
2627 }
2628
2629 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2630 {
2631 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2632
2633 safexcel_aead_cra_init(tfm);
2634 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2635 ctx->state_sz = GHASH_BLOCK_SIZE;
2636 ctx->xcm = EIP197_XCM_MODE_GCM;
2637 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2638
2639 return 0;
2640 }
2641
2642 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2643 {
2644 safexcel_aead_cra_exit(tfm);
2645 }
2646
2647 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2648 unsigned int authsize)
2649 {
2650 return crypto_gcm_check_authsize(authsize);
2651 }
2652
2653 struct safexcel_alg_template safexcel_alg_gcm = {
2654 .type = SAFEXCEL_ALG_TYPE_AEAD,
2655 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2656 .alg.aead = {
2657 .setkey = safexcel_aead_gcm_setkey,
2658 .setauthsize = safexcel_aead_gcm_setauthsize,
2659 .encrypt = safexcel_aead_encrypt,
2660 .decrypt = safexcel_aead_decrypt,
2661 .ivsize = GCM_AES_IV_SIZE,
2662 .maxauthsize = GHASH_DIGEST_SIZE,
2663 .base = {
2664 .cra_name = "gcm(aes)",
2665 .cra_driver_name = "safexcel-gcm-aes",
2666 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2667 .cra_flags = CRYPTO_ALG_ASYNC |
2668 CRYPTO_ALG_ALLOCATES_MEMORY |
2669 CRYPTO_ALG_KERN_DRIVER_ONLY,
2670 .cra_blocksize = 1,
2671 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2672 .cra_alignmask = 0,
2673 .cra_init = safexcel_aead_gcm_cra_init,
2674 .cra_exit = safexcel_aead_gcm_cra_exit,
2675 .cra_module = THIS_MODULE,
2676 },
2677 },
2678 };
2679
2680 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2681 unsigned int len)
2682 {
2683 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2684 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2685 struct safexcel_crypto_priv *priv = ctx->base.priv;
2686 struct crypto_aes_ctx aes;
2687 int ret, i;
2688
2689 ret = aes_expandkey(&aes, key, len);
2690 if (ret) {
2691 memzero_explicit(&aes, sizeof(aes));
2692 return ret;
2693 }
2694
2695 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2696 for (i = 0; i < len / sizeof(u32); i++) {
2697 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2698 ctx->base.needs_inv = true;
2699 break;
2700 }
2701 }
2702 }
2703
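/*
 * For CCM the cipher key also acts as the CBC-MAC key: besides the normal
 * key slot it is copied into the hash state ("ipad") area as well, after
 * the two AES blocks reserved at the start of that state.
 */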
2704 for (i = 0; i < len / sizeof(u32); i++) {
2705 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2706 ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2707 cpu_to_be32(aes.key_enc[i]);
2708 }
2709
2710 ctx->key_len = len;
2711 ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2712
2713 if (len == AES_KEYSIZE_192)
2714 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2715 else if (len == AES_KEYSIZE_256)
2716 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2717 else
2718 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2719
2720 memzero_explicit(&aes, sizeof(aes));
2721 return 0;
2722 }
2723
2724 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2725 {
2726 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2727
2728 safexcel_aead_cra_init(tfm);
2729 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2730 ctx->state_sz = 3 * AES_BLOCK_SIZE;
2731 ctx->xcm = EIP197_XCM_MODE_CCM;
2732 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2733 ctx->ctrinit = 0;
2734 return 0;
2735 }
2736
2737 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2738 unsigned int authsize)
2739 {
2740 /* Borrowed from crypto/ccm.c */
2741 switch (authsize) {
2742 case 4:
2743 case 6:
2744 case 8:
2745 case 10:
2746 case 12:
2747 case 14:
2748 case 16:
2749 break;
2750 default:
2751 return -EINVAL;
2752 }
2753
2754 return 0;
2755 }
2756
2757 static int safexcel_ccm_encrypt(struct aead_request *req)
2758 {
2759 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2760
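/* iv[0] is the CCM L' value (size-of-length field minus 1); only 1..7 is valid */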
2761 if (req->iv[0] < 1 || req->iv[0] > 7)
2762 return -EINVAL;
2763
2764 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2765 }
2766
2767 static int safexcel_ccm_decrypt(struct aead_request *req)
2768 {
2769 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2770
2771 if (req->iv[0] < 1 || req->iv[0] > 7)
2772 return -EINVAL;
2773
2774 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2775 }
2776
2777 struct safexcel_alg_template safexcel_alg_ccm = {
2778 .type = SAFEXCEL_ALG_TYPE_AEAD,
2779 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2780 .alg.aead = {
2781 .setkey = safexcel_aead_ccm_setkey,
2782 .setauthsize = safexcel_aead_ccm_setauthsize,
2783 .encrypt = safexcel_ccm_encrypt,
2784 .decrypt = safexcel_ccm_decrypt,
2785 .ivsize = AES_BLOCK_SIZE,
2786 .maxauthsize = AES_BLOCK_SIZE,
2787 .base = {
2788 .cra_name = "ccm(aes)",
2789 .cra_driver_name = "safexcel-ccm-aes",
2790 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2791 .cra_flags = CRYPTO_ALG_ASYNC |
2792 CRYPTO_ALG_ALLOCATES_MEMORY |
2793 CRYPTO_ALG_KERN_DRIVER_ONLY,
2794 .cra_blocksize = 1,
2795 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2796 .cra_alignmask = 0,
2797 .cra_init = safexcel_aead_ccm_cra_init,
2798 .cra_exit = safexcel_aead_cra_exit,
2799 .cra_module = THIS_MODULE,
2800 },
2801 },
2802 };
2803
2804 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2805 const u8 *key)
2806 {
2807 struct safexcel_crypto_priv *priv = ctx->base.priv;
2808
2809 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2810 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2811 ctx->base.needs_inv = true;
2812
2813 memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2814 ctx->key_len = CHACHA_KEY_SIZE;
2815 }
2816
2817 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2818 const u8 *key, unsigned int len)
2819 {
2820 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2821
2822 if (len != CHACHA_KEY_SIZE)
2823 return -EINVAL;
2824
2825 safexcel_chacha20_setkey(ctx, key);
2826
2827 return 0;
2828 }
2829
2830 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2831 {
2832 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2833
2834 safexcel_skcipher_cra_init(tfm);
2835 ctx->alg = SAFEXCEL_CHACHA20;
2836 ctx->ctrinit = 0;
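/* Mode name suggests a 256-bit key with a 32-bit block counter (RFC 7539 style) */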
2837 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2838 return 0;
2839 }
2840
2841 struct safexcel_alg_template safexcel_alg_chacha20 = {
2842 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2843 .algo_mask = SAFEXCEL_ALG_CHACHA20,
2844 .alg.skcipher = {
2845 .setkey = safexcel_skcipher_chacha20_setkey,
2846 .encrypt = safexcel_encrypt,
2847 .decrypt = safexcel_decrypt,
2848 .min_keysize = CHACHA_KEY_SIZE,
2849 .max_keysize = CHACHA_KEY_SIZE,
2850 .ivsize = CHACHA_IV_SIZE,
2851 .base = {
2852 .cra_name = "chacha20",
2853 .cra_driver_name = "safexcel-chacha20",
2854 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2855 .cra_flags = CRYPTO_ALG_ASYNC |
2856 CRYPTO_ALG_ALLOCATES_MEMORY |
2857 CRYPTO_ALG_KERN_DRIVER_ONLY,
2858 .cra_blocksize = 1,
2859 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2860 .cra_alignmask = 0,
2861 .cra_init = safexcel_skcipher_chacha20_cra_init,
2862 .cra_exit = safexcel_skcipher_cra_exit,
2863 .cra_module = THIS_MODULE,
2864 },
2865 },
2866 };
2867
2868 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2869 const u8 *key, unsigned int len)
2870 {
2871 struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2872
2873 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2874 len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2875 /* ESP variant has nonce appended to key */
2876 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2877 ctx->nonce = *(u32 *)(key + len);
2878 }
2879 if (len != CHACHA_KEY_SIZE)
2880 return -EINVAL;
2881
2882 safexcel_chacha20_setkey(ctx, key);
2883
2884 return 0;
2885 }
2886
2887 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2888 unsigned int authsize)
2889 {
2890 if (authsize != POLY1305_DIGEST_SIZE)
2891 return -EINVAL;
2892 return 0;
2893 }
2894
2895 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2896 enum safexcel_cipher_direction dir)
2897 {
2898 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2899 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2900 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2901 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2902 struct aead_request *subreq = aead_request_ctx(req);
2903 u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2904 int ret = 0;
2905
2906 /*
2907 * Instead of wasting time detecting umpteen silly corner cases,
2908 * just dump all "small" requests to the fallback implementation.
2909 * HW would not be faster on such small requests anyway.
2910 */
2911 if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2912 req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2913 req->cryptlen > POLY1305_DIGEST_SIZE)) {
2914 return safexcel_queue_req(&req->base, creq, dir);
2915 }
2916
2917 /* HW cannot do full (AAD+payload) zero length, use fallback */
2918 memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2919 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2920 /* ESP variant has nonce appended to the key */
2921 key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2922 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2923 CHACHA_KEY_SIZE +
2924 EIP197_AEAD_IPSEC_NONCE_SIZE);
2925 } else {
2926 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2927 CHACHA_KEY_SIZE);
2928 }
2929 if (ret) {
2930 crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2931 crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2932 CRYPTO_TFM_REQ_MASK);
2933 return ret;
2934 }
2935
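/*
 * The request context doubles as the fallback subrequest; the reqsize set
 * in safexcel_aead_fallback_cra_init() accounts for this.
 */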
2936 aead_request_set_tfm(subreq, ctx->fback);
2937 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2938 req->base.data);
2939 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2940 req->iv);
2941 aead_request_set_ad(subreq, req->assoclen);
2942
2943 return (dir == SAFEXCEL_ENCRYPT) ?
2944 crypto_aead_encrypt(subreq) :
2945 crypto_aead_decrypt(subreq);
2946 }
2947
2948 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2949 {
2950 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2951 }
2952
2953 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2954 {
2955 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2956 }
2957
2958 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2959 {
2960 struct crypto_aead *aead = __crypto_aead_cast(tfm);
2961 struct aead_alg *alg = crypto_aead_alg(aead);
2962 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2963
2964 safexcel_aead_cra_init(tfm);
2965
2966 /* Allocate fallback implementation */
2967 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2968 CRYPTO_ALG_ASYNC |
2969 CRYPTO_ALG_NEED_FALLBACK);
2970 if (IS_ERR(ctx->fback))
2971 return PTR_ERR(ctx->fback);
2972
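/* Size the request context to hold either the driver state or a full fallback subrequest */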
2973 crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2974 sizeof(struct aead_request) +
2975 crypto_aead_reqsize(ctx->fback)));
2976
2977 return 0;
2978 }
2979
2980 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2981 {
2982 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2983
2984 safexcel_aead_fallback_cra_init(tfm);
2985 ctx->alg = SAFEXCEL_CHACHA20;
2986 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2987 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2988 ctx->ctrinit = 0;
2989 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2990 ctx->state_sz = 0; /* Precomputed by HW */
2991 return 0;
2992 }
2993
2994 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2995 {
2996 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2997
2998 crypto_free_aead(ctx->fback);
2999 safexcel_aead_cra_exit(tfm);
3000 }
3001
3002 struct safexcel_alg_template safexcel_alg_chachapoly = {
3003 .type = SAFEXCEL_ALG_TYPE_AEAD,
3004 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3005 .alg.aead = {
3006 .setkey = safexcel_aead_chachapoly_setkey,
3007 .setauthsize = safexcel_aead_chachapoly_setauthsize,
3008 .encrypt = safexcel_aead_chachapoly_encrypt,
3009 .decrypt = safexcel_aead_chachapoly_decrypt,
3010 .ivsize = CHACHAPOLY_IV_SIZE,
3011 .maxauthsize = POLY1305_DIGEST_SIZE,
3012 .base = {
3013 .cra_name = "rfc7539(chacha20,poly1305)",
3014 .cra_driver_name = "safexcel-chacha20-poly1305",
3015 /* +1 to put it above HW chacha + SW poly */
3016 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3017 .cra_flags = CRYPTO_ALG_ASYNC |
3018 CRYPTO_ALG_ALLOCATES_MEMORY |
3019 CRYPTO_ALG_KERN_DRIVER_ONLY |
3020 CRYPTO_ALG_NEED_FALLBACK,
3021 .cra_blocksize = 1,
3022 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3023 .cra_alignmask = 0,
3024 .cra_init = safexcel_aead_chachapoly_cra_init,
3025 .cra_exit = safexcel_aead_fallback_cra_exit,
3026 .cra_module = THIS_MODULE,
3027 },
3028 },
3029 };
3030
3031 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3032 {
3033 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3034 int ret;
3035
3036 ret = safexcel_aead_chachapoly_cra_init(tfm);
3037 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3038 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3039 return ret;
3040 }
3041
3042 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3043 .type = SAFEXCEL_ALG_TYPE_AEAD,
3044 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3045 .alg.aead = {
3046 .setkey = safexcel_aead_chachapoly_setkey,
3047 .setauthsize = safexcel_aead_chachapoly_setauthsize,
3048 .encrypt = safexcel_aead_chachapoly_encrypt,
3049 .decrypt = safexcel_aead_chachapoly_decrypt,
3050 .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3051 .maxauthsize = POLY1305_DIGEST_SIZE,
3052 .base = {
3053 .cra_name = "rfc7539esp(chacha20,poly1305)",
3054 .cra_driver_name = "safexcel-chacha20-poly1305-esp",
3055 /* +1 to put it above HW chacha + SW poly */
3056 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3057 .cra_flags = CRYPTO_ALG_ASYNC |
3058 CRYPTO_ALG_ALLOCATES_MEMORY |
3059 CRYPTO_ALG_KERN_DRIVER_ONLY |
3060 CRYPTO_ALG_NEED_FALLBACK,
3061 .cra_blocksize = 1,
3062 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3063 .cra_alignmask = 0,
3064 .cra_init = safexcel_aead_chachapolyesp_cra_init,
3065 .cra_exit = safexcel_aead_fallback_cra_exit,
3066 .cra_module = THIS_MODULE,
3067 },
3068 },
3069 };
3070
3071 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3072 const u8 *key, unsigned int len)
3073 {
3074 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3075 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3076 struct safexcel_crypto_priv *priv = ctx->base.priv;
3077
3078 if (len != SM4_KEY_SIZE)
3079 return -EINVAL;
3080
3081 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3082 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3083 ctx->base.needs_inv = true;
3084
3085 memcpy(ctx->key, key, SM4_KEY_SIZE);
3086 ctx->key_len = SM4_KEY_SIZE;
3087
3088 return 0;
3089 }
3090
3091 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3092 {
3093 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3094 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3095 return -EINVAL;
3096 else
3097 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3098 SAFEXCEL_ENCRYPT);
3099 }
3100
3101 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3102 {
3103 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3104 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3105 return -EINVAL;
3106 else
3107 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3108 SAFEXCEL_DECRYPT);
3109 }
3110
3111 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3112 {
3113 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3114
3115 safexcel_skcipher_cra_init(tfm);
3116 ctx->alg = SAFEXCEL_SM4;
3117 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3118 ctx->blocksz = 0;
3119 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3120 return 0;
3121 }
3122
3123 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3124 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3125 .algo_mask = SAFEXCEL_ALG_SM4,
3126 .alg.skcipher = {
3127 .setkey = safexcel_skcipher_sm4_setkey,
3128 .encrypt = safexcel_sm4_blk_encrypt,
3129 .decrypt = safexcel_sm4_blk_decrypt,
3130 .min_keysize = SM4_KEY_SIZE,
3131 .max_keysize = SM4_KEY_SIZE,
3132 .base = {
3133 .cra_name = "ecb(sm4)",
3134 .cra_driver_name = "safexcel-ecb-sm4",
3135 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3136 .cra_flags = CRYPTO_ALG_ASYNC |
3137 CRYPTO_ALG_ALLOCATES_MEMORY |
3138 CRYPTO_ALG_KERN_DRIVER_ONLY,
3139 .cra_blocksize = SM4_BLOCK_SIZE,
3140 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3141 .cra_alignmask = 0,
3142 .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3143 .cra_exit = safexcel_skcipher_cra_exit,
3144 .cra_module = THIS_MODULE,
3145 },
3146 },
3147 };
3148
3149 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3150 {
3151 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3152
3153 safexcel_skcipher_cra_init(tfm);
3154 ctx->alg = SAFEXCEL_SM4;
3155 ctx->blocksz = SM4_BLOCK_SIZE;
3156 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3157 return 0;
3158 }
3159
3160 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3161 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3162 .algo_mask = SAFEXCEL_ALG_SM4,
3163 .alg.skcipher = {
3164 .setkey = safexcel_skcipher_sm4_setkey,
3165 .encrypt = safexcel_sm4_blk_encrypt,
3166 .decrypt = safexcel_sm4_blk_decrypt,
3167 .min_keysize = SM4_KEY_SIZE,
3168 .max_keysize = SM4_KEY_SIZE,
3169 .ivsize = SM4_BLOCK_SIZE,
3170 .base = {
3171 .cra_name = "cbc(sm4)",
3172 .cra_driver_name = "safexcel-cbc-sm4",
3173 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3174 .cra_flags = CRYPTO_ALG_ASYNC |
3175 CRYPTO_ALG_ALLOCATES_MEMORY |
3176 CRYPTO_ALG_KERN_DRIVER_ONLY,
3177 .cra_blocksize = SM4_BLOCK_SIZE,
3178 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3179 .cra_alignmask = 0,
3180 .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3181 .cra_exit = safexcel_skcipher_cra_exit,
3182 .cra_module = THIS_MODULE,
3183 },
3184 },
3185 };
3186
3187 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3188 {
3189 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3190
3191 safexcel_skcipher_cra_init(tfm);
3192 ctx->alg = SAFEXCEL_SM4;
3193 ctx->blocksz = SM4_BLOCK_SIZE;
3194 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3195 return 0;
3196 }
3197
3198 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3199 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3200 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3201 .alg.skcipher = {
3202 .setkey = safexcel_skcipher_sm4_setkey,
3203 .encrypt = safexcel_encrypt,
3204 .decrypt = safexcel_decrypt,
3205 .min_keysize = SM4_KEY_SIZE,
3206 .max_keysize = SM4_KEY_SIZE,
3207 .ivsize = SM4_BLOCK_SIZE,
3208 .base = {
3209 .cra_name = "ofb(sm4)",
3210 .cra_driver_name = "safexcel-ofb-sm4",
3211 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3212 .cra_flags = CRYPTO_ALG_ASYNC |
3213 CRYPTO_ALG_ALLOCATES_MEMORY |
3214 CRYPTO_ALG_KERN_DRIVER_ONLY,
3215 .cra_blocksize = 1,
3216 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3217 .cra_alignmask = 0,
3218 .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3219 .cra_exit = safexcel_skcipher_cra_exit,
3220 .cra_module = THIS_MODULE,
3221 },
3222 },
3223 };
3224
3225 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3226 {
3227 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3228
3229 safexcel_skcipher_cra_init(tfm);
3230 ctx->alg = SAFEXCEL_SM4;
3231 ctx->blocksz = SM4_BLOCK_SIZE;
3232 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3233 return 0;
3234 }
3235
3236 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3237 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3238 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3239 .alg.skcipher = {
3240 .setkey = safexcel_skcipher_sm4_setkey,
3241 .encrypt = safexcel_encrypt,
3242 .decrypt = safexcel_decrypt,
3243 .min_keysize = SM4_KEY_SIZE,
3244 .max_keysize = SM4_KEY_SIZE,
3245 .ivsize = SM4_BLOCK_SIZE,
3246 .base = {
3247 .cra_name = "cfb(sm4)",
3248 .cra_driver_name = "safexcel-cfb-sm4",
3249 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3250 .cra_flags = CRYPTO_ALG_ASYNC |
3251 CRYPTO_ALG_ALLOCATES_MEMORY |
3252 CRYPTO_ALG_KERN_DRIVER_ONLY,
3253 .cra_blocksize = 1,
3254 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3255 .cra_alignmask = 0,
3256 .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3257 .cra_exit = safexcel_skcipher_cra_exit,
3258 .cra_module = THIS_MODULE,
3259 },
3260 },
3261 };
3262
3263 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3264 const u8 *key, unsigned int len)
3265 {
3266 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3267 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3268
3269 /* last 4 bytes of key are the nonce! */
3270 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3271 /* exclude the nonce here */
3272 len -= CTR_RFC3686_NONCE_SIZE;
3273
3274 return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3275 }
3276
3277 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3278 {
3279 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3280
3281 safexcel_skcipher_cra_init(tfm);
3282 ctx->alg = SAFEXCEL_SM4;
3283 ctx->blocksz = SM4_BLOCK_SIZE;
3284 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3285 return 0;
3286 }
3287
3288 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3289 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3290 .algo_mask = SAFEXCEL_ALG_SM4,
3291 .alg.skcipher = {
3292 .setkey = safexcel_skcipher_sm4ctr_setkey,
3293 .encrypt = safexcel_encrypt,
3294 .decrypt = safexcel_decrypt,
3295 /* Add nonce size */
3296 .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3297 .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3298 .ivsize = CTR_RFC3686_IV_SIZE,
3299 .base = {
3300 .cra_name = "rfc3686(ctr(sm4))",
3301 .cra_driver_name = "safexcel-ctr-sm4",
3302 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3303 .cra_flags = CRYPTO_ALG_ASYNC |
3304 CRYPTO_ALG_ALLOCATES_MEMORY |
3305 CRYPTO_ALG_KERN_DRIVER_ONLY,
3306 .cra_blocksize = 1,
3307 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3308 .cra_alignmask = 0,
3309 .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3310 .cra_exit = safexcel_skcipher_cra_exit,
3311 .cra_module = THIS_MODULE,
3312 },
3313 },
3314 };
3315
3316 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3317 {
3318 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3319 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3320 return -EINVAL;
3321
3322 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3323 SAFEXCEL_ENCRYPT);
3324 }
3325
3326 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3327 {
3328 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3329
3330 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3331 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3332 return -EINVAL;
3333
3334 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3335 SAFEXCEL_DECRYPT);
3336 }
3337
3338 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3339 {
3340 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3341
3342 safexcel_aead_cra_init(tfm);
3343 ctx->alg = SAFEXCEL_SM4;
3344 ctx->blocksz = SM4_BLOCK_SIZE;
3345 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3346 ctx->state_sz = SHA1_DIGEST_SIZE;
3347 return 0;
3348 }
3349
3350 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3351 .type = SAFEXCEL_ALG_TYPE_AEAD,
3352 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3353 .alg.aead = {
3354 .setkey = safexcel_aead_setkey,
3355 .encrypt = safexcel_aead_sm4_blk_encrypt,
3356 .decrypt = safexcel_aead_sm4_blk_decrypt,
3357 .ivsize = SM4_BLOCK_SIZE,
3358 .maxauthsize = SHA1_DIGEST_SIZE,
3359 .base = {
3360 .cra_name = "authenc(hmac(sha1),cbc(sm4))",
3361 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3362 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3363 .cra_flags = CRYPTO_ALG_ASYNC |
3364 CRYPTO_ALG_ALLOCATES_MEMORY |
3365 CRYPTO_ALG_KERN_DRIVER_ONLY,
3366 .cra_blocksize = SM4_BLOCK_SIZE,
3367 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3368 .cra_alignmask = 0,
3369 .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3370 .cra_exit = safexcel_aead_cra_exit,
3371 .cra_module = THIS_MODULE,
3372 },
3373 },
3374 };
3375
safexcel_aead_fallback_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)3376 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3377 const u8 *key, unsigned int len)
3378 {
3379 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3380 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3381
3382 /* Keep fallback cipher synchronized */
3383 return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3384 safexcel_aead_setkey(ctfm, key, len);
3385 }
3386
safexcel_aead_fallback_setauthsize(struct crypto_aead * ctfm,unsigned int authsize)3387 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3388 unsigned int authsize)
3389 {
3390 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3391 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3392
3393 /* Keep fallback cipher synchronized */
3394 return crypto_aead_setauthsize(ctx->fback, authsize);
3395 }
3396
safexcel_aead_fallback_crypt(struct aead_request * req,enum safexcel_cipher_direction dir)3397 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3398 enum safexcel_cipher_direction dir)
3399 {
3400 struct crypto_aead *aead = crypto_aead_reqtfm(req);
3401 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3402 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3403 struct aead_request *subreq = aead_request_ctx(req);
3404
3405 aead_request_set_tfm(subreq, ctx->fback);
3406 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3407 req->base.data);
3408 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3409 req->iv);
3410 aead_request_set_ad(subreq, req->assoclen);
3411
3412 return (dir == SAFEXCEL_ENCRYPT) ?
3413 crypto_aead_encrypt(subreq) :
3414 crypto_aead_decrypt(subreq);
3415 }
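
/*
 * Explanatory note (assumption, not stated in this excerpt): the fallback
 * subrequest lives directly in the parent request's context, so this pattern
 * only works if the fallback-capable cra_init sizes the request context to
 * hold a struct aead_request plus the fallback transform's own reqsize.
 * A minimal sketch of that sizing, using the real crypto_aead_set_reqsize()
 * and crypto_aead_reqsize() helpers (the exact expression used by the driver
 * is an assumption here):
 *
 *	crypto_aead_set_reqsize(ctfm, sizeof(struct aead_request) +
 *				      crypto_aead_reqsize(ctx->fback));
 */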

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
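
/*
 * Layout note (illustrative, not from the original source): RFC 4106 keys are
 * handed to setkey as the AES key immediately followed by a 4-byte salt, so a
 * 128-bit AES key arrives as len = 16 + 4 = 20 bytes. The trailing salt ends
 * up in ctx->nonce as the fixed field of the GCM IV, while the per-request
 * 8-byte IV (GCM_RFC4106_IV_SIZE) supplies the explicit part. Hypothetical
 * layout of the key blob:
 *
 *	key[0..15]   AES-128 key material
 *	key[16..19]  salt, copied into ctx->nonce
 */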

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
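
/*
 * Explanatory note (assumption, based on the IPsec ESP layout implied by
 * crypto_ipsec_check_assoclen()): the caller's AAD buffer for rfc4106 is
 * SPI(4) + sequence number(4 or 8) + explicit IV(8), i.e. assoclen 16 or 20.
 * Setting ctx->aadskip to EIP197_AEAD_IPSEC_IV_SIZE (8) is what lets the
 * token builder drop those trailing IV bytes, so only the SPI and sequence
 * number are authenticated as GCM AAD.
 */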

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
		},
	},
};
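
/*
 * Usage sketch (illustrative only; the buffers, sizes and request variables
 * are hypothetical, the API calls are the standard kernel AEAD interface):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("rfc4106(gcm(aes))", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, 16 + 4);     20 bytes: AES key plus salt
 *	crypto_aead_setauthsize(tfm, 16);         full 16-byte GCM tag
 *	aead_request_set_ad(req, 16);             SPI + seq + explicit IV
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	crypto_aead_encrypt(req);
 */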

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
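
/*
 * Layout note (illustrative, values hypothetical): for rfc4309 the key blob
 * is the AES key followed by a 3-byte salt, so e.g. AES-128 arrives as
 * len = 16 + 3 = 19 bytes. The 32-bit ctx->nonce word is then assembled as:
 *
 *	byte 0:     EIP197_AEAD_IPSEC_COUNTER_SIZE - 1 = 3, the CCM "L" value
 *	            for a 4-byte counter
 *	bytes 1..3: the 3-byte salt copied from the end of the key
 *
 * Only the first len - 3 bytes are passed on to safexcel_aead_ccm_setkey().
 */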

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
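
/*
 * Explanatory note (assumption, mirroring the rfc4309 handling in
 * crypto/ccm.c): the only accepted assoclen values correspond to an ESP
 * header of SPI(4) + 32-bit sequence number(4) or SPI(4) + 64-bit extended
 * sequence number(8), in both cases followed by the 8-byte explicit IV that
 * the caller appends to the AAD, giving 16 or 20 bytes in total.
 */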

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};