// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <crypto/internal/cipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;

	u32 alg;
	u8 key_sz;
	bool cbcmac;
	bool do_fallback;
	bool fb_init_done;
	bool fb_do_setkey;

	struct crypto_aes_ctx *aes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;	/* expected state size, only set once */
	u8 block_sz;	/* block size, only set once */
	u8 digest_sz;	/* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}
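
/*
 * Illustration: req->len counts all bytes accepted by update() so far, while
 * req->processed counts the bytes already handed to the engine. With e.g.
 * len = 100 and processed = 64, 36 bytes are still queued in the driver's
 * cache and will go out with a later descriptor.
 */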

static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}
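
/*
 * Worked example of the resulting token stream: for a CBC-MAC request with
 * input_length = 20, token[0] directs 20 bytes into the hash, token[1] makes
 * the engine insert 16 - (20 & 15) = 12 padding bytes to complete the final
 * AES block, and token[2] inserts the resulting digest into the output
 * stream and closes the packet.
 */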

static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and set up the context
	 * fields. Do this now as we need it to set up the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, set up (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into a u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}
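
		/*
		 * Illustration (assuming EIP197_COUNTER_BLOCK_SIZE is the
		 * 64-byte counter block of this engine family): after three
		 * SHA-256 blocks (192 bytes), count = 192 / 64 = 3. The u32
		 * limit checked above is only reached after hundreds of GiB
		 * of input.
		 */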

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       &ctx->base.opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}
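
/*
 * Summary of the context record layouts set up above: XCM modes load only
 * the key or intermediate state; a first block restarts the hash with no
 * state at all; a continuation loads the inner state, optionally followed
 * by the 32-bit digest count; and a single-pass HMAC finish loads the inner
 * and outer states back to back.
 */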

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}
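
		/*
		 * The path above emulates HMAC in two passes: the first pass
		 * hashed key^ipad || message and returned the inner digest;
		 * the request is now re-queued with the opad precompute as
		 * initial state and the inner digest as sole input, so the
		 * second pass produces hash(key^opad || inner digest).
		 */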

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ... */
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block: pad it out
			 * to a complete block.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 instead of K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
					u32 *cache = (void *)req->cache;
					u32 *ipad = ctx->base.ipad.word;
					u32 x;

					x = ipad[i] ^ ipad[i + 4];
					cache[i] ^= swab32(x);
				}
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}
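
	/*
	 * Example of the padding above, for XCBC-MAC/CMAC with 10 cached
	 * bytes and nothing left to borrow: cache[10] gets the 0x80 marker
	 * and cache[11..15] are zeroed. Because the engine then whitens the
	 * final block with K2 rather than the K3 required for incomplete
	 * blocks, the block is also XORed with (K2 ^ K3), turning the
	 * engine's K2 XOR into an effective K3 XOR.
	 */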

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->base.priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one full block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * i.e. the total size handled by update() (not counting the current
	 * request) minus the total size already handled by send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there aren't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}
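
/*
 * Usage from update(): a return of 0 means everything (old cache plus the
 * new request bytes) still fits in the HASH_CACHE_SIZE byte cache; -E2BIG
 * means at least one full block is now available and the caller should
 * enqueue the request to the engine.
 */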

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
			     &ctx->base.opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			u32 *result = (void *)areq->result;

			/* K3 */
			result[i] = swab32(ctx->base.ipad.word[i + 4]);
		}
		areq->result[0] ^= 0x80; // 10- padding
		aes_encrypt(ctx->aes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz - 8] = (req->block_sz << 3) &
							255;
			req->cache[req->block_sz - 7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz - 2] = (req->block_sz >> 5);
			req->cache[req->block_sz - 1] = (req->block_sz << 3) &
							255;
		}
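
		/*
		 * Worked example for SHA-1/SHA-256 (block_sz = 64): the
		 * message length is one block = 512 bits = 0x0200, so
		 * cache[62] = 0x02 and cache[63] = 0x00. Only two bytes are
		 * ever needed here because the padded length is always
		 * exactly one block.
		 */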

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->base.priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA1_BLOCK_SIZE;
	req->processed = SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
	struct completion completion;
	int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
	struct safexcel_ahash_result *result = req->data;

	if (error == -EINPROGRESS)
		return;

	result->error = error;
	complete(&result->completion);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	struct safexcel_ahash_result result;
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   safexcel_ahash_complete, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);
		init_completion(&result.completion);

		ret = crypto_ahash_digest(areq);
		if (ret == -EINPROGRESS || ret == -EBUSY) {
			wait_for_completion_interruptible(&result.completion);
			ret = result.error;
		}

		/* Avoid leaking */
		kfree_sensitive(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}
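
/*
 * This is the standard RFC 2104 key schedule: a key longer than the block
 * size is first digested, the (possibly shortened) key is zero-padded to a
 * full block, and that block is XORed with 0x36 (HMAC_IPAD_VALUE) for the
 * inner pad and 0x5c (HMAC_OPAD_VALUE) for the outer pad.
 */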

static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_result result;
	struct safexcel_ahash_req *req;
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_ahash_complete, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);
	init_completion(&result.completion);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		return ret;

	wait_for_completion_interruptible(&result.completion);
	if (result.error)
		return result.error;

	return crypto_ahash_export(areq, state);
}
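
/*
 * Note on the trick above: setting last_req makes update() push the single
 * key^pad block through the engine without finalizing it, so the exported
 * state is the raw intermediate digest of that block - exactly the ipad or
 * opad precompute the hardware expects as its starting state.
 */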

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
				  unsigned int keylen,
				  void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}
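
/*
 * Putting the helpers together, a setkey for "hmac(sha1)" roughly does:
 *
 *	struct safexcel_ahash_export_state istate, ostate;
 *
 *	__safexcel_hmac_setkey("safexcel-sha1", key, keylen, &istate, &ostate);
 *	// istate.state now holds the intermediate SHA-1 state after one
 *	// block of key^ipad, ostate.state the same for key^opad
 *
 * safexcel_hmac_setkey() below stores these two precomputes in the context
 * so that HMAC requests can start mid-stream instead of re-hashing the key.
 */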

int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
			 unsigned int keylen, const char *alg,
			 unsigned int state_sz)
{
	struct safexcel_crypto_priv *priv = base->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
	    (memcmp(&base->ipad, istate.state, state_sz) ||
	     memcmp(&base->opad, ostate.state, state_sz)))
		base->needs_inv = true;

	memcpy(&base->ipad, &istate.state, state_sz);
	memcpy(&base->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1707
safexcel_hmac_sha384_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)1708 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1709 unsigned int keylen)
1710 {
1711 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1712 SHA512_DIGEST_SIZE);
1713 }
1714
static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = MD5_HMAC_BLOCK_SIZE;
	req->processed = MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(&ctx->base.ipad, 0, sizeof(u32));
	return ret;
}

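/*
 * CRC32 is modelled as a keyed hash: the optional 4 byte "key" is the
 * (inverted) initial CRC value, so the default all-zeroes key yields
 * the standard 0xffffffff preset after the inversion in init below.
 */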
static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded key */
	req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = sizeof(u32);
	req->processed = sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(&ctx->base.ipad, key, sizeof(u32));
	return 0;
}

static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

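/*
 * Shared init for cbcmac(aes), xcbc(aes) and cmac(aes): the state is
 * preloaded with whatever key material setkey staged in the ipad
 * buffer, and the MAC is always a single AES block wide.
 */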
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = AES_BLOCK_SIZE;
	req->processed = AES_BLOCK_SIZE;

	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac = true;

	return 0;
}

static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

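	/*
	 * Lay the key out like the XCBC/CMAC context: two (zeroed) subkey
	 * blocks first, then the expanded AES key at word offset 8.
	 */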
	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

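/*
 * XCBC-MAC (RFC 3566) derives three subkeys by encrypting the constants
 * 0x01..01, 0x02..02 and 0x03..03 under the user key. K2 and K3 are
 * staged as the first two blocks of the loaded key material, followed
 * by K1, which is also re-expanded below to serve as the AES key.
 */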
static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				   unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(ctx->aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		    "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	aes_encrypt(ctx->aes, (u8 *)key_tmp,
		    "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		    "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.word[i] = swab32(key_tmp[i]);

	ret = aes_expandkey(ctx->aes,
			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
			    AES_MIN_KEY_SIZE);
	if (ret)
		return ret;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	return 0;
}

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	/* kmalloc() returns NULL on failure, not an ERR_PTR() */
	ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
	return ctx->aes == NULL ? -ENOMEM : 0;
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	kfree(ctx->aes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

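/*
 * CMAC (NIST SP 800-38B) subkey derivation: L = AES-K(0^128), then
 * K1 = L*x and K2 = L*x^2 in GF(2^128) with reduction polynomial
 * x^128 + x^7 + x^2 + x + 1, hence the 0x87 feedback mask below.
 */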
static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	__be64 consts[4];
	u64 _const[2];
	u8 msb_mask, gfmask;
	int ret, i;

	/* precompute the CMAC key material */
	ret = aes_expandkey(ctx->aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);

	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
	memset(consts, 0, AES_BLOCK_SIZE);
	aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);

	gfmask = 0x87;
	_const[0] = be64_to_cpu(consts[1]);
	_const[1] = be64_to_cpu(consts[0]);

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	for (i = 0; i < 4; i += 2) {
		msb_mask = ((s64)_const[1] >> 63) & gfmask;
		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
		_const[0] = (_const[0] << 1) ^ msb_mask;

		consts[i + 0] = cpu_to_be64(_const[1]);
		consts[i + 1] = cpu_to_be64(_const[0]);
	}
	/* end of code borrowed from crypto/cmac.c */

	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = false;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}

static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SM3_BLOCK_SIZE;
	req->processed = SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

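/*
 * The engine cannot hash zero-length messages nor resume a partially
 * hashed SHA3 state, so those cases are bounced to a software fallback.
 * The fallback subrequest lives in the ahash request context; for HMAC
 * its key is reassembled from the ipad/opad halves staged by setkey.
 * SHA3_224_BLOCK_SIZE is the largest SHA3 block size, so the on-stack
 * key buffer below fits every variant.
 */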
static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);
	int ret = 0;

	if (ctx->do_fallback) {
		ahash_request_set_tfm(subreq, ctx->fback);
		ahash_request_set_callback(subreq, req->base.flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(subreq, req->src, req->result,
					req->nbytes);
		if (!ctx->fb_init_done) {
			if (ctx->fb_do_setkey) {
				/* Set fallback cipher HMAC key */
				u8 key[SHA3_224_BLOCK_SIZE];

				memcpy(key, &ctx->base.ipad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				memcpy(key +
				       crypto_ahash_blocksize(ctx->fback) / 2,
				       &ctx->base.opad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				ret = crypto_ahash_setkey(ctx->fback, key,
					crypto_ahash_blocksize(ctx->fback));
				memzero_explicit(key,
					crypto_ahash_blocksize(ctx->fback));
				ctx->fb_do_setkey = false;
			}
			ret = ret ?: crypto_ahash_init(subreq);
			ctx->fb_init_done = true;
		}
	}
	return ret;
}

static int safexcel_sha3_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback |= !req->nbytes;
	if (ctx->do_fallback)
		/* Update or ex/import happened or len 0, cannot use the HW */
		return safexcel_sha3_fbcheck(req) ?:
		       crypto_ahash_finup(subreq);
	else
		return safexcel_ahash_finup(req);
}

static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	ctx->fb_init_done = false;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
}

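/*
 * The fallback's statesize and reqsize are adopted below, since
 * export/import (and every other case the hardware cannot handle) are
 * serviced entirely by the fallback tfm.
 */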
static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}

static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_sha3_cra_init(tfm);
	if (ret)
		return ret;

	/* Allocate precalc basic digest implementation */
	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shpre))
		return PTR_ERR(ctx->shpre);

	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
	if (!ctx->shdesc) {
		crypto_free_shash(ctx->shpre);
		return -ENOMEM;
	}
	ctx->shdesc->tfm = ctx->shpre;
	return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}

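/*
 * Keys longer than the block size are first digested with the plain
 * SHA3 shash allocated at init time; the (possibly shortened) key is
 * then split across the ipad and opad buffers, one half each, to match
 * the layout the HMAC infrastructure expects.
 */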
static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our fallback cipher
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  ctx->base.ipad.byte);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need memmove instead of memcpy! */
			memmove(&ctx->base.opad,
				ctx->base.ipad.byte +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(&ctx->base.ipad, key, keylen);
		} else {
			memcpy(&ctx->base.ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(&ctx->base.opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset(ctx->base.ipad.byte + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset(ctx->base.opad.byte + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}

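/*
 * Note: unlike the MD5/SHA-2 HMAC variants above, the SHA3 HMACs load
 * the raw (zero-padded) key halves and use CONTEXT_CONTROL_DIGEST_HMAC
 * rather than a precomputed ipad/opad digest, presumably because the
 * Keccak construction does not allow precomputing partial states here.
 */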
static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_224_BLOCK_SIZE;
	req->processed = SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_256_BLOCK_SIZE;
	req->processed = SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_384_BLOCK_SIZE;
	req->processed = SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_512_BLOCK_SIZE;
	req->processed = SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};