// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
 * using optimized ARM assembler and NEON instructions.
 *
 * Copyright © 2015 Google Inc.
 *
 * This file is based on sha256_ssse3_glue.c:
 *   Copyright (C) 2013 Intel Corporation
 *   Author: Tim Chen <tim.c.chen@linux.intel.com>
 */

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <linux/string.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <asm/simd.h>
#include <asm/neon.h>

#include "sha256_glue.h"

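/*
 * The core SHA-256 transform is implemented in ARM assembly (likely the
 * CRYPTOGAMS/OpenSSL sha256-armv4 code shipped alongside this glue). It
 * consumes 'num_blks' complete 64-byte input blocks and updates the hash
 * state in place.
 */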
asmlinkage void sha256_block_data_order(struct sha256_state *state,
                                        const u8 *data, int num_blks);

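/*
 * The update and finup helpers are exported (see sha256_glue.h) so that the
 * NEON glue code can fall back to these scalar routines when kernel-mode
 * NEON cannot be used.
 */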
int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        /* make sure casting to sha256_block_fn() is safe */
        BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

        return sha256_base_do_update(desc, data, len, sha256_block_data_order);
}
EXPORT_SYMBOL(crypto_sha256_arm_update);

static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
{
        sha256_base_do_finalize(desc, sha256_block_data_order);
        return sha256_base_finish(desc, out);
}

int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
                            unsigned int len, u8 *out)
{
        sha256_base_do_update(desc, data, len, sha256_block_data_order);
        return crypto_sha256_arm_final(desc, out);
}
EXPORT_SYMBOL(crypto_sha256_arm_finup);

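/*
 * Priority 150 is meant to rank these scalar assembly transforms above the
 * generic C implementations ("sha256-generic"/"sha224-generic"), while the
 * NEON variants registered from sha256_mod_init() below use a higher
 * priority and are preferred on CPUs that have NEON.
 */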
static struct shash_alg algs[] = { {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = crypto_sha256_arm_update,
        .final      = crypto_sha256_arm_final,
        .finup      = crypto_sha256_arm_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-asm",
                .cra_priority    = 150,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize = SHA224_DIGEST_SIZE,
        .init       = sha224_base_init,
        .update     = crypto_sha256_arm_update,
        .final      = crypto_sha256_arm_final,
        .finup      = crypto_sha256_arm_finup,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-asm",
                .cra_priority    = 150,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

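/*
 * The scalar assembly transforms are registered unconditionally; the NEON
 * variants (sha256_neon_algs, defined in sha256_neon_glue.c) are only
 * registered when the kernel supports kernel-mode NEON and the CPU
 * actually implements it.
 */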
static int __init sha256_mod_init(void)
{
        int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));

        if (res < 0)
                return res;

        if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
                res = crypto_register_shashes(sha256_neon_algs,
                                              ARRAY_SIZE(sha256_neon_algs));

                if (res < 0)
                        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
        }

        return res;
}

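/*
 * Mirror of sha256_mod_init(): the scalar algorithms are always
 * unregistered; the NEON set only under the same condition that caused it
 * to be registered.
 */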
static void __exit sha256_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

        if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
                crypto_unregister_shashes(sha256_neon_algs,
                                          ARRAY_SIZE(sha256_neon_algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha224");
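
/*
 * Illustrative only (not part of the original file, error handling omitted):
 * kernel users normally reach these transforms through the generic shash API
 * rather than by calling the exported helpers above directly, e.g. with a
 * caller-provided buffer 'data' of length 'len':
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *      SHASH_DESC_ON_STACK(desc, tfm);
 *      u8 digest[SHA256_DIGEST_SIZE];
 *
 *      desc->tfm = tfm;
 *      crypto_shash_digest(desc, data, len, digest);
 *      crypto_free_shash(tfm);
 *
 * The crypto core then picks the highest-priority registered "sha256"
 * implementation, which may or may not be one of the transforms registered
 * by this module.
 */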