// SPDX-License-Identifier: GPL-2.0-only
/*
 * SM4 using the RISC-V vector crypto extensions
 *
 * Copyright (C) 2023 VRULL GmbH
 * Author: Heiko Stuebner <heiko.stuebner@vrull.eu>
 *
 * Copyright (C) 2023 SiFive, Inc.
 * Author: Jerry Shih <jerry.shih@sifive.com>
 */

#include <asm/simd.h>
#include <asm/vector.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/simd.h>
#include <crypto/sm4.h>
#include <linux/linkage.h>
#include <linux/module.h>

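/*
 * Assembly entry points, implemented using the Zvksed and Zvkb vector
 * crypto extensions.  The key-expansion routine fills both the encryption
 * and the decryption round-key schedules in one call; the crypt routine
 * processes a single 16-byte block with whichever schedule it is given.
 */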
asmlinkage void sm4_expandkey_zvksed_zvkb(const u8 user_key[SM4_KEY_SIZE],
					  u32 rkey_enc[SM4_RKEY_WORDS],
					  u32 rkey_dec[SM4_RKEY_WORDS]);
asmlinkage void sm4_crypt_zvksed_zvkb(const u32 rkey[SM4_RKEY_WORDS],
				      const u8 in[SM4_BLOCK_SIZE],
				      u8 out[SM4_BLOCK_SIZE]);

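/*
 * Set the key.  When the vector unit is usable in the current context, the
 * round keys are expanded by the assembly routine (after validating the key
 * length); otherwise this falls back to the generic sm4_expandkey() library
 * helper, which performs its own length check.
 */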
static int riscv64_sm4_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	if (crypto_simd_usable()) {
		if (keylen != SM4_KEY_SIZE)
			return -EINVAL;
		kernel_vector_begin();
		sm4_expandkey_zvksed_zvkb(key, ctx->rkey_enc, ctx->rkey_dec);
		kernel_vector_end();
		return 0;
	}
	return sm4_expandkey(ctx, key, keylen);
}

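/*
 * Encrypt a single block.  kernel_vector_begin()/kernel_vector_end()
 * bracket the vector-register usage; if the vector unit cannot be used in
 * the current context, the generic sm4_crypt_block() library helper runs
 * instead.  Note the differing argument orders: the assembly routine takes
 * (rkey, in, out) while the library helper takes (rkey, out, in).
 */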
static void riscv64_sm4_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	if (crypto_simd_usable()) {
		kernel_vector_begin();
		sm4_crypt_zvksed_zvkb(ctx->rkey_enc, src, dst);
		kernel_vector_end();
	} else {
		sm4_crypt_block(ctx->rkey_enc, dst, src);
	}
}

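/*
 * Decrypt a single block.  SM4 decryption is the same operation as
 * encryption with the round keys applied in reverse order, so this reuses
 * the same assembly routine with the decryption schedule prepared at
 * setkey time.
 */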
static void riscv64_sm4_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	if (crypto_simd_usable()) {
		kernel_vector_begin();
		sm4_crypt_zvksed_zvkb(ctx->rkey_dec, src, dst);
		kernel_vector_end();
	} else {
		sm4_crypt_block(ctx->rkey_dec, dst, src);
	}
}

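/*
 * Single-block ("cipher" type) algorithm definition.  The driver name
 * advertises the required extensions, and the priority of 300 is chosen so
 * that this implementation is preferred over the generic C "sm4" cipher
 * (which presumably registers at a lower priority).
 */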
static struct crypto_alg riscv64_sm4_alg = {
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = SM4_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct sm4_ctx),
	.cra_priority = 300,
	.cra_name = "sm4",
	.cra_driver_name = "sm4-riscv64-zvksed-zvkb",
	.cra_cipher = {
		.cia_min_keysize = SM4_KEY_SIZE,
		.cia_max_keysize = SM4_KEY_SIZE,
		.cia_setkey = riscv64_sm4_setkey,
		.cia_encrypt = riscv64_sm4_encrypt,
		.cia_decrypt = riscv64_sm4_decrypt,
	},
	.cra_module = THIS_MODULE,
};

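/*
 * Register the cipher only when both required vector crypto extensions are
 * present and the vector length is at least 128 bits, as the assembly code
 * expects; on other hardware the generic implementation remains the only
 * provider.
 */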
static int __init riscv64_sm4_mod_init(void)
{
	if (riscv_isa_extension_available(NULL, ZVKSED) &&
	    riscv_isa_extension_available(NULL, ZVKB) &&
	    riscv_vector_vlen() >= 128)
		return crypto_register_alg(&riscv64_sm4_alg);

	return -ENODEV;
}

static void __exit riscv64_sm4_mod_exit(void)
{
	crypto_unregister_alg(&riscv64_sm4_alg);
}

module_init(riscv64_sm4_mod_init);
module_exit(riscv64_sm4_mod_exit);

MODULE_DESCRIPTION("SM4 (RISC-V accelerated)");
MODULE_AUTHOR("Heiko Stuebner <heiko.stuebner@vrull.eu>");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("sm4");
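
/*
 * Usage sketch (illustrative only, not part of this module): a kernel-side
 * caller reaches this implementation through the single-block cipher API by
 * allocating "sm4".  This is a minimal sketch assuming the caller supplies
 * a 16-byte key and one block of data; the crypto_cipher API is declared in
 * <crypto/internal/cipher.h> and module users may additionally need
 * MODULE_IMPORT_NS(CRYPTO_INTERNAL).
 *
 *	struct crypto_cipher *tfm;
 *	u8 buf[SM4_BLOCK_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("sm4", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_cipher_setkey(tfm, key, SM4_KEY_SIZE);
 *	if (!err) {
 *		crypto_cipher_encrypt_one(tfm, buf, buf);
 *		crypto_cipher_decrypt_one(tfm, buf, buf);
 *	}
 *	crypto_free_cipher(tfm);
 */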