// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx ZynqMP SHA Driver.
 * Copyright (c) 2022 Xilinx Inc.
 */
#include <linux/cacheflush.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/sha3.h>
#include <linux/crypto.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/firmware/xlnx-zynqmp.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#define ZYNQMP_DMA_BIT_MASK		32U
#define ZYNQMP_DMA_ALLOC_FIXED_SIZE	0x1000U

enum zynqmp_sha_op {
	ZYNQMP_SHA3_INIT = 1,
	ZYNQMP_SHA3_UPDATE = 2,
	ZYNQMP_SHA3_FINAL = 4,
};

struct zynqmp_sha_drv_ctx {
	struct shash_alg sha3_384;
	struct device *dev;
};

struct zynqmp_sha_tfm_ctx {
	struct device *dev;
	struct crypto_shash *fbk_tfm;
};

struct zynqmp_sha_desc_ctx {
	struct shash_desc fbk_req;
};

static dma_addr_t update_dma_addr, final_dma_addr;
static char *ubuf, *fbuf;

static int zynqmp_sha_init_tfm(struct crypto_shash *hash)
{
	const char *fallback_driver_name = crypto_shash_alg_name(hash);
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *fallback_tfm;
	struct zynqmp_sha_drv_ctx *drv_ctx;

	drv_ctx = container_of(alg, struct zynqmp_sha_drv_ctx, sha3_384);
	tfm_ctx->dev = drv_ctx->dev;

	/* Allocate a fallback and abort if it failed. */
	fallback_tfm = crypto_alloc_shash(fallback_driver_name, 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm))
		return PTR_ERR(fallback_tfm);

	tfm_ctx->fbk_tfm = fallback_tfm;

	/* Reserve room for the fallback's own descriptor state. */
	hash->descsize += crypto_shash_descsize(tfm_ctx->fbk_tfm);

	return 0;
}

static void zynqmp_sha_exit_tfm(struct crypto_shash *hash)
{
	struct zynqmp_sha_tfm_ctx *tfm_ctx = crypto_shash_ctx(hash);

	if (tfm_ctx->fbk_tfm) {
		crypto_free_shash(tfm_ctx->fbk_tfm);
		tfm_ctx->fbk_tfm = NULL;
	}

	memzero_explicit(tfm_ctx, sizeof(struct zynqmp_sha_tfm_ctx));
}

/*
 * The hardware only exposes a one-shot digest operation, so the
 * init/update/final/finup/export/import hooks below delegate to the
 * software fallback allocated in zynqmp_sha_init_tfm().
 */
static int zynqmp_sha_init(struct shash_desc *desc)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_init(&dctx->fbk_req);
}

static int zynqmp_sha_update(struct shash_desc *desc, const u8 *data, unsigned int length)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_update(&dctx->fbk_req, data, length);
}

static int zynqmp_sha_final(struct shash_desc *desc, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_final(&dctx->fbk_req, out);
}

static int zynqmp_sha_finup(struct shash_desc *desc, const u8 *data, unsigned int length, u8 *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_finup(&dctx->fbk_req, data, length, out);
}

static int zynqmp_sha_import(struct shash_desc *desc, const void *in)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);
	struct zynqmp_sha_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);

	dctx->fbk_req.tfm = tctx->fbk_tfm;
	return crypto_shash_import(&dctx->fbk_req, in);
}

static int zynqmp_sha_export(struct shash_desc *desc, void *out)
{
	struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);

	return crypto_shash_export(&dctx->fbk_req, out);
}

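/*
 * One-shot digest, offloaded to the platform firmware via
 * zynqmp_pm_sha_hash(): an INIT call, one UPDATE call per chunk of up
 * to ZYNQMP_DMA_ALLOC_FIXED_SIZE bytes bounced through the DMA-coherent
 * ubuf buffer, then a FINAL call that deposits the SHA3-384 digest in
 * fbuf.
 */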
static int zynqmp_sha_digest(struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out)
{
	unsigned int remaining_len = len;
	int update_size;
	int ret;

	ret = zynqmp_pm_sha_hash(0, 0, ZYNQMP_SHA3_INIT);
	if (ret)
		return ret;

	while (remaining_len != 0) {
		memzero_explicit(ubuf, ZYNQMP_DMA_ALLOC_FIXED_SIZE);
		if (remaining_len >= ZYNQMP_DMA_ALLOC_FIXED_SIZE) {
			update_size = ZYNQMP_DMA_ALLOC_FIXED_SIZE;
			remaining_len -= ZYNQMP_DMA_ALLOC_FIXED_SIZE;
		} else {
			update_size = remaining_len;
			remaining_len = 0;
		}
		memcpy(ubuf, data, update_size);
		flush_icache_range((unsigned long)ubuf, (unsigned long)ubuf + update_size);
		ret = zynqmp_pm_sha_hash(update_dma_addr, update_size, ZYNQMP_SHA3_UPDATE);
		if (ret)
			return ret;

		data += update_size;
	}

	ret = zynqmp_pm_sha_hash(final_dma_addr, SHA3_384_DIGEST_SIZE, ZYNQMP_SHA3_FINAL);
	memcpy(out, fbuf, SHA3_384_DIGEST_SIZE);
	memzero_explicit(fbuf, SHA3_384_DIGEST_SIZE);

	return ret;
}

static struct zynqmp_sha_drv_ctx sha3_drv_ctx = {
	.sha3_384 = {
		.init = zynqmp_sha_init,
		.update = zynqmp_sha_update,
		.final = zynqmp_sha_final,
		.finup = zynqmp_sha_finup,
		.digest = zynqmp_sha_digest,
		.export = zynqmp_sha_export,
		.import = zynqmp_sha_import,
		.init_tfm = zynqmp_sha_init_tfm,
		.exit_tfm = zynqmp_sha_exit_tfm,
		.descsize = sizeof(struct zynqmp_sha_desc_ctx),
		.statesize = sizeof(struct sha3_state),
		.digestsize = SHA3_384_DIGEST_SIZE,
		.base = {
			.cra_name = "sha3-384",
			.cra_driver_name = "zynqmp-sha3-384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SHA3_384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct zynqmp_sha_tfm_ctx),
			.cra_module = THIS_MODULE,
		}
	}
};

static int zynqmp_sha_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	int err;
	u32 v;

	/* Verify the hardware is present. */
	err = zynqmp_pm_get_api_version(&v);
	if (err)
		return err;

	err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(ZYNQMP_DMA_BIT_MASK));
	if (err < 0) {
		dev_err(dev, "No usable DMA configuration\n");
		return err;
	}

	err = crypto_register_shash(&sha3_drv_ctx.sha3_384);
	if (err < 0) {
		dev_err(dev, "Failed to register shash alg.\n");
		return err;
	}

	sha3_drv_ctx.dev = dev;
	platform_set_drvdata(pdev, &sha3_drv_ctx);

	ubuf = dma_alloc_coherent(dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, &update_dma_addr, GFP_KERNEL);
	if (!ubuf) {
		err = -ENOMEM;
		goto err_shash;
	}

	fbuf = dma_alloc_coherent(dev, SHA3_384_DIGEST_SIZE, &final_dma_addr, GFP_KERNEL);
	if (!fbuf) {
		err = -ENOMEM;
		goto err_mem;
	}

	return 0;

err_mem:
	dma_free_coherent(sha3_drv_ctx.dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);

err_shash:
	crypto_unregister_shash(&sha3_drv_ctx.sha3_384);

	return err;
}

static void zynqmp_sha_remove(struct platform_device *pdev)
{
	/*
	 * Fetch the driver context saved at probe time; drvdata points at
	 * the whole zynqmp_sha_drv_ctx, not at the struct device.
	 */
	struct zynqmp_sha_drv_ctx *drv_ctx = platform_get_drvdata(pdev);

	dma_free_coherent(drv_ctx->dev, ZYNQMP_DMA_ALLOC_FIXED_SIZE, ubuf, update_dma_addr);
	dma_free_coherent(drv_ctx->dev, SHA3_384_DIGEST_SIZE, fbuf, final_dma_addr);
	crypto_unregister_shash(&drv_ctx->sha3_384);
}

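/*
 * Usage sketch (illustrative only, not part of this driver): once
 * registered, the "sha3-384" algorithm above is reached through the
 * generic shash API. Error handling is abbreviated and data/len are
 * placeholders:
 *
 *	struct crypto_shash *tfm;
 *	u8 digest[SHA3_384_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha3-384", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		err = crypto_shash_digest(desc, data, len, digest);
 *		shash_desc_zero(desc);
 *	}
 *
 *	crypto_free_shash(tfm);
 *	return err;
 */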
static struct platform_driver zynqmp_sha_driver = {
	.probe = zynqmp_sha_probe,
	.remove_new = zynqmp_sha_remove,
	.driver = {
		.name = "zynqmp-sha3-384",
	},
};

module_platform_driver(zynqmp_sha_driver);
MODULE_DESCRIPTION("ZynqMP SHA3 hardware acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Harsha <harsha.harsha@xilinx.com>");