/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

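/*
 * Quick usage sketch (illustrative only, not part of this file): a shash
 * transform is allocated with crypto_alloc_shash() and driven through a
 * shash_desc whose context area is crypto_shash_descsize() bytes long,
 * mirroring what crypto_init_shash_ops_compat() below does internally.
 * The algorithm name, digest size and error handling are example values
 * only:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *	u8 digest[32];
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, digest);
 *	kfree(desc);
 *	crypto_free_shash(tfm);
 */
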
static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        return -ENOSYS;
}

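/*
 * For algorithms that declare an alignment mask, an unaligned key is
 * bounced through a kmalloc()ed buffer padded enough that the copy can be
 * placed at an address satisfying the mask (kmalloc() already guarantees
 * crypto_tfm_ctx_alignment()), and wiped with kzfree() afterwards.
 */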
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

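/*
 * Unaligned update: copy only the unaligned head of the data into an
 * aligned on-stack buffer and hash that first, then hand the now-aligned
 * remainder straight to the algorithm's ->update().
 */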
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

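/*
 * Unaligned final: let the algorithm write the digest into an aligned
 * on-stack buffer, then copy it to the caller's buffer and clear the copy.
 */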
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

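/*
 * Generic finup fallback: update followed by final.  Besides handling the
 * unaligned case, this is also the default ->finup() that
 * shash_prepare_alg() installs for algorithms that do not provide one.
 */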
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

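/*
 * Generic digest fallback: init, then finup over the whole buffer.  Also
 * serves as the default ->digest() installed by shash_prepare_alg().
 */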
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

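/*
 * Default export/import: the exported state is simply the raw descriptor
 * context, so statesize defaults to descsize (see shash_prepare_alg()).
 */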
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

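/*
 * Feed an ahash request's scatterlist into a synchronous hash: walk the
 * source with crypto_hash_walk_*() and pass each linearised chunk to
 * crypto_shash_update().
 */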
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

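/*
 * As shash_ahash_update(), but the last chunk of the walk is handed to
 * crypto_shash_finup() so the digest lands directly in req->result.  An
 * empty request degenerates to a plain final.
 */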
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}

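/*
 * Digest an ahash request with a synchronous hash.  If the data fits in a
 * single page it is mapped with kmap_atomic() and hashed in one call;
 * otherwise fall back to init plus shash_ahash_finup() over the
 * scatterlist.
 */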
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

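/*
 * Expose a synchronous hash through the asynchronous ahash interface:
 * allocate an internal shash transform, stash it in the ahash tfm context
 * and point the ahash entry points at the shash_async_*() helpers above.
 * The per-request context holds the shash_desc.
 */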
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;

        if (alg->setkey)
                crt->setkey = shash_async_setkey;
        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct shash_desc **descp = crypto_hash_ctx(tfm);
        struct shash_desc *desc = *descp;

        return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;

        desc->flags = hdesc->flags;

        return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int len)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
        struct shash_desc *desc = *descp;
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
             nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
        struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

        return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
                               unsigned int nbytes, u8 *out)
{
        unsigned int offset = sg->offset;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
                struct shash_desc *desc = *descp;
                void *data;

                desc->flags = hdesc->flags;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes, out);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
                goto out;
        }

        err = shash_compat_init(hdesc);
        if (err)
                goto out;

        err = shash_compat_update(hdesc, sg, nbytes);
        if (err)
                goto out;

        err = shash_compat_final(hdesc, out);

out:
        return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct shash_desc *desc = *descp;

        crypto_free_shash(desc->tfm);
        kzfree(desc);
}

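/*
 * Same idea as the ahash wrapper, but for the legacy crypto_hash
 * interface: allocate an internal shash transform plus a long-lived
 * shash_desc, and route the crypto_hash entry points through the
 * shash_compat_*() helpers.
 */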
static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
        struct hash_tfm *crt = &tfm->crt_hash;
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct shash_desc **descp = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;
        struct shash_desc *desc;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
                       GFP_KERNEL);
        if (!desc) {
                crypto_free_shash(shash);
                return -ENOMEM;
        }

        *descp = desc;
        desc->tfm = shash;
        tfm->exit = crypto_exit_shash_ops_compat;

        crt->init = shash_compat_init;
        crt->update = shash_compat_update;
        crt->final = shash_compat_final;
        crt->digest = shash_compat_digest;
        crt->setkey = shash_compat_setkey;

        crt->digestsize = alg->digestsize;

        return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return crypto_init_shash_ops_compat(tfm);
        }

        return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
                                         u32 mask)
{
        switch (mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_HASH_MASK:
                return sizeof(struct shash_desc *);
        }

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);

        hash->descsize = crypto_shash_alg(hash)->descsize;
        return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
        return alg->cra_ctxsize;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        NLA_PUT(skb, CRYPTOCFGA_REPORT_HASH,
                sizeof(struct crypto_report_hash), &rhash);

        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .ctxsize = crypto_shash_ctxsize,
        .extsize = crypto_shash_extsize,
        .init = crypto_init_shash_ops,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

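/*
 * Common preparation for registering a synchronous hash: sanity-check the
 * advertised sizes, mark the algorithm as an shash and fill in default
 * ->finup(), ->digest(), ->export()/->import() and ->setkey() handlers.
 *
 * A minimal registration sketch (illustrative only; the names and sizes
 * below are made-up example values, and error handling is omitted):
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 16,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.base		= {
 *			.cra_name	= "example",
 *			.cra_blocksize	= 64,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	crypto_register_shash(&example_alg);
 */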
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");