// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	if (IS_ENABLED(CONFIG_CRYPTO_STATS) && err)
		atomic64_inc(&shash_get_stat(alg)->err_cnt);
	return err;
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
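
/*
 * Example (illustrative sketch, not kernel-doc): keying a keyed shash such
 * as "hmac(sha256)" before use.  The algorithm name, key and keylen below
 * are assumptions made for the example.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out;	// tfm is flagged CRYPTO_TFM_NEED_KEY again
 */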

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
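
/*
 * Example (illustrative sketch): incremental hashing with a stack
 * descriptor.  The "sha256" tfm and the two data buffers are assumptions
 * made for the example; crypto_shash_init() lives in <crypto/hash.h>.
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	desc->tfm = tfm;
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, buf1, len1) ?:
 *	      crypto_shash_finup(desc, buf2, len2, digest);
 */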

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->init(desc) ?:
	       shash->finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
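
/*
 * Example (illustrative sketch): one-shot digest without managing a
 * descriptor at all.  The "sha256" name and the data/len buffer are
 * assumptions made for the example.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */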

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (shash->export)
		return shash->export(desc, out);

	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (shash->import)
		return shash->import(desc, in);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
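
/*
 * Example (illustrative sketch): saving partial state and resuming on a
 * second descriptor.  The required buffer size is crypto_shash_statesize();
 * here desc and desc2 are assumed to share the same tfm.
 *
 *	u8 state[HASH_MAX_STATESIZE];
 *	int err;
 *
 *	err = crypto_shash_export(desc, state) ?:
 *	      crypto_shash_import(desc2, state);
 */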

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	/* alignmask is not useful for hashes, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
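
/*
 * Example (illustrative sketch): the minimal shape of a shash_alg that a
 * driver might register.  All names, sizes and callbacks below are made up
 * for the example; ->finup, ->digest and ->setkey may be omitted because
 * shash_prepare_alg() installs defaults for them.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.base		= {
 *			.cra_name	= "example-hash",
 *			.cra_blocksize	= 64,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */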

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");