// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * BLAKE2b digest algorithm, NEON accelerated
 *
 * Copyright 2020 Google LLC
 */

#include <crypto/internal/blake2b.h>
#include <crypto/internal/hash.h>

#include <linux/module.h>
#include <linux/sizes.h>

#include <asm/neon.h>
#include <asm/simd.h>

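/*
 * The compression function itself lives in the accompanying NEON assembly
 * (blake2b-neon-core.S).  It consumes 'nblocks' message blocks of
 * BLAKE2B_BLOCK_SIZE bytes each, adding 'inc' to the message counter per block.
 */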
asmlinkage void blake2b_compress_neon(struct blake2b_state *state,
				      const u8 *block, size_t nblocks, u32 inc);

static void blake2b_compress_arch(struct blake2b_state *state,
				  const u8 *block, size_t nblocks, u32 inc)
{
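	/*
	 * Hand at most SZ_4K bytes to the NEON routine per
	 * kernel_neon_begin()/kernel_neon_end() pair, so that preemption is
	 * not kept disabled for overly long stretches on large inputs.
	 */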
	do {
		const size_t blocks = min_t(size_t, nblocks,
					    SZ_4K / BLAKE2B_BLOCK_SIZE);

		kernel_neon_begin();
		blake2b_compress_neon(state, block, blocks, inc);
		kernel_neon_end();

		nblocks -= blocks;
		block += blocks * BLAKE2B_BLOCK_SIZE;
	} while (nblocks);
}

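/*
 * Thin wrappers that plug the NEON-backed compression function into the
 * generic block-only BLAKE2b update/finup helpers.
 */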
static int crypto_blake2b_update_neon(struct shash_desc *desc,
				      const u8 *in, unsigned int inlen)
{
	return crypto_blake2b_update_bo(desc, in, inlen, blake2b_compress_arch);
}

static int crypto_blake2b_finup_neon(struct shash_desc *desc, const u8 *in,
				     unsigned int inlen, u8 *out)
{
	return crypto_blake2b_finup(desc, in, inlen, out,
				    blake2b_compress_arch);
}

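/*
 * Template for the shash_alg entries below; only the algorithm/driver names
 * and the digest size differ between the four BLAKE2b variants.
 */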
#define BLAKE2B_ALG(name, driver_name, digest_size)			\
	{								\
		.base.cra_name		= name,				\
		.base.cra_driver_name	= driver_name,			\
		.base.cra_priority	= 200,				\
		.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY |	\
					  CRYPTO_AHASH_ALG_BLOCK_ONLY |	\
					  CRYPTO_AHASH_ALG_FINAL_NONZERO, \
		.base.cra_blocksize	= BLAKE2B_BLOCK_SIZE,		\
		.base.cra_ctxsize	= sizeof(struct blake2b_tfm_ctx), \
		.base.cra_module	= THIS_MODULE,			\
		.digestsize		= digest_size,			\
		.setkey			= crypto_blake2b_setkey,	\
		.init			= crypto_blake2b_init,		\
		.update			= crypto_blake2b_update_neon,	\
		.finup			= crypto_blake2b_finup_neon,	\
		.descsize		= sizeof(struct blake2b_state),	\
		.statesize		= BLAKE2B_STATE_SIZE,		\
	}

static struct shash_alg blake2b_neon_algs[] = {
	BLAKE2B_ALG("blake2b-160", "blake2b-160-neon", BLAKE2B_160_HASH_SIZE),
	BLAKE2B_ALG("blake2b-256", "blake2b-256-neon", BLAKE2B_256_HASH_SIZE),
	BLAKE2B_ALG("blake2b-384", "blake2b-384-neon", BLAKE2B_384_HASH_SIZE),
	BLAKE2B_ALG("blake2b-512", "blake2b-512-neon", BLAKE2B_512_HASH_SIZE),
};

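/* Register the NEON implementations only if this CPU actually has NEON. */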
static int __init blake2b_neon_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	return crypto_register_shashes(blake2b_neon_algs,
				       ARRAY_SIZE(blake2b_neon_algs));
}

static void __exit blake2b_neon_mod_exit(void)
{
	crypto_unregister_shashes(blake2b_neon_algs,
				  ARRAY_SIZE(blake2b_neon_algs));
}

module_init(blake2b_neon_mod_init);
module_exit(blake2b_neon_mod_exit);

MODULE_DESCRIPTION("BLAKE2b digest algorithm, NEON accelerated");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("blake2b-160");
MODULE_ALIAS_CRYPTO("blake2b-160-neon");
MODULE_ALIAS_CRYPTO("blake2b-256");
MODULE_ALIAS_CRYPTO("blake2b-256-neon");
MODULE_ALIAS_CRYPTO("blake2b-384");
MODULE_ALIAS_CRYPTO("blake2b-384-neon");
MODULE_ALIAS_CRYPTO("blake2b-512");
MODULE_ALIAS_CRYPTO("blake2b-512-neon");