// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
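
/*
 * Example usage (a minimal sketch, not part of this file): computing a
 * SHA-256 digest of a linear buffer with the one-shot crypto_ahash_digest()
 * call, waiting synchronously for completion.  "data" and "len" are assumed
 * to exist in the caller.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 digest[32];
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		crypto_free_ahash(tfm);
 *		return -ENOMEM;
 *	}
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */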

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

static inline struct crypto_istat_hash *ahash_get_stat(struct ahash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_ahash_errstat(struct ahash_alg *alg, int err)
{
	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
		return err;

	if (err && err != -EINPROGRESS && err != -EBUSY)
		atomic64_inc(&ahash_get_stat(alg)->err_cnt);

	return err;
}

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
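
/*
 * The canonical walk pattern (a sketch mirroring shash_ahash_update()
 * above): hash each mapped chunk, feeding the chunk's return value back
 * into crypto_hash_walk_done() until the walk is exhausted or fails.
 * "process" stands in for the per-chunk hash step and is hypothetical.
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = process(walk.data, nbytes);
 */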

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
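
/*
 * Keyed example (a minimal sketch): an HMAC transform must be keyed before
 * any digest operation, otherwise CRYPTO_TFM_NEED_KEY stays set and
 * crypto_ahash_digest() returns -ENOKEY.  "key" and "keylen" are assumed
 * to exist in the caller.
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
 *
 *	if (!IS_ERR(tfm))
 *		err = crypto_ahash_setkey(tfm, key, keylen);
 */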

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

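/*
 * Allocate a subrequest that mirrors @req but carries its own result
 * buffer, optionally transplanting the hash state via export/import,
 * and park it in req->priv.  Used by the default finup fallback below.
 */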
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_alg *alg;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));

	alg = crypto_ahash_alg(tfm);
	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(req->nbytes, &ahash_get_stat(alg)->hash_tlen);
	return crypto_ahash_errstat(alg, alg->update(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_alg *alg;

	if (likely(tfm->using_shash))
		return crypto_shash_final(ahash_request_ctx(req), req->result);

	alg = crypto_ahash_alg(tfm);
	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&ahash_get_stat(alg)->hash_cnt);
	return crypto_ahash_errstat(alg, alg->final(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_alg *alg;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));

	alg = crypto_ahash_alg(tfm);
	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = ahash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}
	return crypto_ahash_errstat(alg, alg->finup(req));
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ahash_alg *alg;
	int err;

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

	alg = crypto_ahash_alg(tfm);
	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = ahash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else
		err = alg->digest(req);

	return crypto_ahash_errstat(alg, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

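/*
 * Default finup implementation for drivers that only provide ->update()
 * and ->final(): run update on the saved subrequest, then final, with
 * done1/done2 completing each asynchronous stage in turn.
 */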
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = crypto_ahash_alg(tfm)->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

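/*
 * An ahash tfm may be backed by an shash algorithm; that case is detected
 * via cra_type and routed to the shash wrapper setup above, otherwise the
 * native ahash init/exit hooks are wired up.
 */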
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static int __maybe_unused crypto_ahash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "ahash");
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_ahash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

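/*
 * Clone a tfm including any key-dependent state.  Keyless transforms are
 * simply reference-counted; shash-backed ones clone the underlying shash,
 * and native ahash algorithms must supply ->clone_tfm().
 */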
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if (!alg->finup)
		alg->finup = ahash_def_finup;
	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}

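/*
 * Registration sketch (hypothetical driver, all "example_*" names are
 * assumptions): a driver fills in struct ahash_alg and registers it;
 * ahash_prepare_alg() supplies the finup/setkey defaults it omits.
 *
 *	static struct ahash_alg example_alg = {
 *		.init	= example_init,
 *		.update	= example_update,
 *		.final	= example_final,
 *		.digest	= example_digest,
 *		.export	= example_export,
 *		.import	= example_import,
 *		.halg	= {
 *			.digestsize = 32,
 *			.statesize  = sizeof(struct example_state),
 *			.base = {
 *				.cra_name	 = "sha256",
 *				.cra_driver_name = "sha256-example",
 *				.cra_priority	 = 300,
 *				.cra_flags	 = CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	 = 64,
 *				.cra_ctxsize	 = sizeof(struct example_ctx),
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&example_alg);
 */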
int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");