/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

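/*
 * If the key the caller passed in is not aligned for this algorithm,
 * copy it into a correctly aligned heap buffer first.  The bounce
 * buffer is freed with kzfree() so key material does not linger.
 */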
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

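/*
 * Set the key for a keyed hash (e.g. HMAC), dispatching to the slow
 * bounce-buffer path when the key violates the algorithm's alignmask.
 */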
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

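/*
 * Worst-case size of an on-stack buffer that will later be aligned to
 * (mask + 1) bytes with PTR_ALIGN(): the requested length plus enough
 * slack to reach the next alignment boundary.
 */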
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

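/*
 * Handle an update whose data pointer violates the alignmask: copy the
 * unaligned head into an aligned stack buffer and hash that first, then
 * hash the now-aligned remainder directly from the caller's buffer.
 */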
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

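/*
 * Fast path: hand aligned data straight to the algorithm.  The same
 * dispatch pattern is used by crypto_shash_final(), crypto_shash_finup()
 * and crypto_shash_digest() below.
 */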
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

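/*
 * Produce the digest into an aligned stack buffer and copy it to the
 * caller's unaligned output, wiping the temporary copy afterwards.
 */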
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

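/*
 * Generic finup as update followed by final.  Besides serving the
 * unaligned case, this doubles as the default ->finup for algorithms
 * that do not provide their own (see shash_prepare_alg()).
 */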
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

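/*
 * Generic digest as init followed by finup; also installed as the
 * default ->digest in shash_prepare_alg().
 */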
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

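/*
 * The default export/import simply copy the raw descriptor context,
 * which is why shash_prepare_alg() sets ->statesize to ->descsize when
 * an algorithm relies on them.
 */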
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

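/*
 * The helpers below expose a synchronous shash through the asynchronous
 * ahash interface.  The ahash tfm context holds a pointer to the
 * underlying crypto_shash, and each request carries its own shash_desc.
 */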
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

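/*
 * Feed an ahash request's scatterlist to a shash descriptor one mapped
 * segment at a time using the hash walk helpers.
 */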
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

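/*
 * Finalise an ahash request on a shash descriptor: walk the remaining
 * data, using finup on the last segment so the digest is produced in
 * the same pass.
 */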
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

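/*
 * Digest an ahash request in one shot.  Data that fits within the first
 * scatterlist entry and a single page is mapped and hashed directly;
 * anything larger falls back to init plus a walking finup.
 */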
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

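/*
 * Instantiate a shash algorithm behind the ahash interface, wiring the
 * async entry points to the wrappers above.  Optional hooks are only
 * exposed when the underlying algorithm provides them.
 */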
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey)
		crt->setkey = shash_async_setkey;
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

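/*
 * Compatibility glue for the legacy crypto_hash interface: the tfm
 * context stores a pointer to a heap-allocated shash_desc that is
 * reused across operations and freed on exit.
 */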
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = crypto_kmap(sg_page(sg), 0);
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		crypto_kunmap(data, 0);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

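/*
 * A shash algorithm can currently only be wrapped for the legacy hash
 * interface through this path; the requested type mask selects it.
 */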
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

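/*
 * Allocate a synchronous hash transform by algorithm name.  A typical
 * caller looks roughly like this (sketch only; "sha1" and the error
 * handling are illustrative):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_shash(tfm);
 *
 * The descriptor used with the tfm must provide descsize bytes of
 * context, as reported by crypto_shash_descsize().
 */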
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

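/*
 * Validate an algorithm's advertised sizes and fill in default
 * operations for any hooks the implementation left NULL.
 */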
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");