// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * BLAKE2b digest algorithm, NEON accelerated
 *
 * Copyright 2020 Google LLC
 */

#include <crypto/internal/blake2b.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>

#include <linux/module.h>
#include <linux/sizes.h>

#include <asm/neon.h>
#include <asm/simd.h>

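/* The NEON transform itself is written in assembly (blake2b-neon-core.S). */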
asmlinkage void blake2b_compress_neon(struct blake2b_state *state,
				      const u8 *block, size_t nblocks, u32 inc);

static void blake2b_compress_arch(struct blake2b_state *state,
				  const u8 *block, size_t nblocks, u32 inc)
{
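	/*
	 * NEON cannot be used in every context (e.g. in hardirq), so fall
	 * back to the portable C implementation when crypto_simd_usable()
	 * says it is unavailable.
	 */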
	if (!crypto_simd_usable()) {
		blake2b_compress_generic(state, block, nblocks, inc);
		return;
	}

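	/*
	 * kernel_neon_begin() disables preemption, so process at most 4 KiB
	 * of input per NEON section to keep scheduling latency bounded when
	 * hashing long messages.
	 */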
	do {
		const size_t blocks = min_t(size_t, nblocks,
					    SZ_4K / BLAKE2B_BLOCK_SIZE);

		kernel_neon_begin();
		blake2b_compress_neon(state, block, blocks, inc);
		kernel_neon_end();

		nblocks -= blocks;
		block += blocks * BLAKE2B_BLOCK_SIZE;
	} while (nblocks);
}

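/*
 * The shared BLAKE2b helpers in <crypto/internal/blake2b.h> handle the
 * buffering and finalization logic; these wrappers just plug in the
 * NEON-accelerated compression function.
 */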
static int crypto_blake2b_update_neon(struct shash_desc *desc,
				      const u8 *in, unsigned int inlen)
{
	return crypto_blake2b_update(desc, in, inlen, blake2b_compress_arch);
}

static int crypto_blake2b_final_neon(struct shash_desc *desc, u8 *out)
{
	return crypto_blake2b_final(desc, out, blake2b_compress_arch);
}

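/*
 * One shash_alg template per digest size.  Priority 200 ranks these drivers
 * above the generic C implementation (priority 100), and
 * CRYPTO_ALG_OPTIONAL_KEY reflects that BLAKE2b may be used keyed or unkeyed.
 */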
#define BLAKE2B_ALG(name, driver_name, digest_size)			\
	{								\
		.base.cra_name		= name,				\
		.base.cra_driver_name	= driver_name,			\
		.base.cra_priority	= 200,				\
		.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,	\
		.base.cra_blocksize	= BLAKE2B_BLOCK_SIZE,		\
		.base.cra_ctxsize	= sizeof(struct blake2b_tfm_ctx), \
		.base.cra_module	= THIS_MODULE,			\
		.digestsize		= digest_size,			\
		.setkey			= crypto_blake2b_setkey,	\
		.init			= crypto_blake2b_init,		\
		.update			= crypto_blake2b_update_neon,	\
		.final			= crypto_blake2b_final_neon,	\
		.descsize		= sizeof(struct blake2b_state),	\
	}

static struct shash_alg blake2b_neon_algs[] = {
	BLAKE2B_ALG("blake2b-160", "blake2b-160-neon", BLAKE2B_160_HASH_SIZE),
	BLAKE2B_ALG("blake2b-256", "blake2b-256-neon", BLAKE2B_256_HASH_SIZE),
	BLAKE2B_ALG("blake2b-384", "blake2b-384-neon", BLAKE2B_384_HASH_SIZE),
	BLAKE2B_ALG("blake2b-512", "blake2b-512-neon", BLAKE2B_512_HASH_SIZE),
};

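/*
 * Illustrative sketch only (not part of this driver): the algorithms above
 * are consumed through the kernel's shash API, e.g.:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("blake2b-256", 0, 0);
 *	u8 digest[BLAKE2B_256_HASH_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_shash_tfm_digest(tfm, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 *
 * where data/len are caller-provided and error handling is omitted.
 */
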
static int __init blake2b_neon_mod_init(void)
{
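	/* Only register the NEON algorithms on CPUs that support NEON. */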
	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	return crypto_register_shashes(blake2b_neon_algs,
				       ARRAY_SIZE(blake2b_neon_algs));
}

static void __exit blake2b_neon_mod_exit(void)
{
	crypto_unregister_shashes(blake2b_neon_algs,
				  ARRAY_SIZE(blake2b_neon_algs));
}

module_init(blake2b_neon_mod_init);
module_exit(blake2b_neon_mod_exit);

MODULE_DESCRIPTION("BLAKE2b digest algorithm, NEON accelerated");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
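/* Allow the module to be auto-loaded when one of these names is requested. */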
98 MODULE_ALIAS_CRYPTO("blake2b-160");
99 MODULE_ALIAS_CRYPTO("blake2b-160-neon");
100 MODULE_ALIAS_CRYPTO("blake2b-256");
101 MODULE_ALIAS_CRYPTO("blake2b-256-neon");
102 MODULE_ALIAS_CRYPTO("blake2b-384");
103 MODULE_ALIAS_CRYPTO("blake2b-384-neon");
104 MODULE_ALIAS_CRYPTO("blake2b-512");
105 MODULE_ALIAS_CRYPTO("blake2b-512-neon");
106