/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 NEON
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>

asmlinkage void sm4_neon_crypt(const u32 *rkey, u8 *dst, const u8 *src,
                               unsigned int nblocks);
asmlinkage void sm4_neon_cbc_dec(const u32 *rkey_dec, u8 *dst, const u8 *src,
                                 u8 *iv, unsigned int nblocks);
asmlinkage void sm4_neon_ctr_crypt(const u32 *rkey_enc, u8 *dst, const u8 *src,
                                   u8 *iv, unsigned int nblocks);

static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        return sm4_expandkey(ctx, key, key_len);
}

/*
 * ECB: process all full blocks in each walk chunk with the NEON bulk
 * routine; rkey selects the encryption or decryption round keys.
 */
static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblocks;

                nblocks = nbytes / SM4_BLOCK_SIZE;
                if (nblocks) {
                        scoped_ksimd()
                                sm4_neon_crypt(rkey, dst, src, nblocks);
                }

                err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
        }

        return err;
}

static int sm4_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}

static int sm4_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}

/*
 * CBC encryption is inherently sequential: each block's input depends on
 * the previous ciphertext block, so it is done one block at a time with
 * the generic sm4_crypt_block() helper rather than a NEON bulk routine.
 */
static int sm4_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *iv = walk.iv;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;

                while (nbytes >= SM4_BLOCK_SIZE) {
                        crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);
                        sm4_crypt_block(ctx->rkey_enc, dst, dst);
                        iv = dst;
                        src += SM4_BLOCK_SIZE;
                        dst += SM4_BLOCK_SIZE;
                        nbytes -= SM4_BLOCK_SIZE;
                }
                if (iv != walk.iv)
                        memcpy(walk.iv, iv, SM4_BLOCK_SIZE);

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblocks;

                nblocks = nbytes / SM4_BLOCK_SIZE;
                if (nblocks) {
                        scoped_ksimd()
                                sm4_neon_cbc_dec(ctx->rkey_dec, dst, src,
                                                 walk.iv, nblocks);
                }

                err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
        }

        return err;
}
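
/*
 * CTR mode: full blocks are handled by the NEON routine, which also
 * advances the counter in walk.iv. A final partial block is handled on
 * the CPU by encrypting the current counter value and XOR-ing the
 * required number of keystream bytes into the output.
 */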
static int sm4_ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblocks;

                nblocks = nbytes / SM4_BLOCK_SIZE;
                if (nblocks) {
                        scoped_ksimd()
                                sm4_neon_ctr_crypt(ctx->rkey_enc, dst, src,
                                                   walk.iv, nblocks);

                        dst += nblocks * SM4_BLOCK_SIZE;
                        src += nblocks * SM4_BLOCK_SIZE;
                        nbytes -= nblocks * SM4_BLOCK_SIZE;
                }

                /* tail */
                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 keystream[SM4_BLOCK_SIZE];

                        sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);
                        crypto_inc(walk.iv, SM4_BLOCK_SIZE);
                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        nbytes = 0;
                }

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static struct skcipher_alg sm4_algs[] = {
        {
                .base = {
                        .cra_name        = "ecb(sm4)",
                        .cra_driver_name = "ecb-sm4-neon",
                        .cra_priority    = 200,
                        .cra_blocksize   = SM4_BLOCK_SIZE,
                        .cra_ctxsize     = sizeof(struct sm4_ctx),
                        .cra_module      = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .setkey      = sm4_setkey,
                .encrypt     = sm4_ecb_encrypt,
                .decrypt     = sm4_ecb_decrypt,
        }, {
                .base = {
                        .cra_name        = "cbc(sm4)",
                        .cra_driver_name = "cbc-sm4-neon",
                        .cra_priority    = 200,
                        .cra_blocksize   = SM4_BLOCK_SIZE,
                        .cra_ctxsize     = sizeof(struct sm4_ctx),
                        .cra_module      = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .ivsize      = SM4_BLOCK_SIZE,
                .setkey      = sm4_setkey,
                .encrypt     = sm4_cbc_encrypt,
                .decrypt     = sm4_cbc_decrypt,
        }, {
                .base = {
                        .cra_name        = "ctr(sm4)",
                        .cra_driver_name = "ctr-sm4-neon",
                        .cra_priority    = 200,
                        .cra_blocksize   = 1,
                        .cra_ctxsize     = sizeof(struct sm4_ctx),
                        .cra_module      = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .ivsize      = SM4_BLOCK_SIZE,
                .chunksize   = SM4_BLOCK_SIZE,
                .setkey      = sm4_setkey,
                .encrypt     = sm4_ctr_crypt,
                .decrypt     = sm4_ctr_crypt,
        }
};

static int __init sm4_init(void)
{
        return crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

static void __exit sm4_exit(void)
{
        crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

/* Register only on CPUs that implement the Advanced SIMD (NEON) extension. */
module_cpu_feature_match(ASIMD, sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CTR using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("sm4-neon");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");