// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */

#include "aspeed-hace.h"
#include <crypto/engine.h>
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/property.h>

#ifdef CONFIG_CRYPTO_DEV_ASPEED_DEBUG
#define HACE_DBG(d, fmt, ...)	\
	dev_info((d)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define HACE_DBG(d, fmt, ...)	\
	dev_dbg((d)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif

/* HACE interrupt service routine */
static irqreturn_t aspeed_hace_irq(int irq, void *dev)
{
	struct aspeed_hace_dev *hace_dev = (struct aspeed_hace_dev *)dev;
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;
	u32 sts;

	sts = ast_hace_read(hace_dev, ASPEED_HACE_STS);
	ast_hace_write(hace_dev, sts, ASPEED_HACE_STS);

	HACE_DBG(hace_dev, "irq status: 0x%x\n", sts);

	if (sts & HACE_HASH_ISR) {
		if (hash_engine->flags & CRYPTO_FLAGS_BUSY)
			tasklet_schedule(&hash_engine->done_task);
		else
			dev_warn(hace_dev->dev, "HASH no active requests.\n");
	}

	if (sts & HACE_CRYPTO_ISR) {
		if (crypto_engine->flags & CRYPTO_FLAGS_BUSY)
			tasklet_schedule(&crypto_engine->done_task);
		else
			dev_warn(hace_dev->dev, "CRYPTO no active requests.\n");
	}

	return IRQ_HANDLED;
}

static void aspeed_hace_crypto_done_task(unsigned long data)
{
	struct aspeed_hace_dev *hace_dev = (struct aspeed_hace_dev *)data;
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;

	crypto_engine->resume(hace_dev);
}

static void aspeed_hace_hash_done_task(unsigned long data)
{
	struct aspeed_hace_dev *hace_dev = (struct aspeed_hace_dev *)data;
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;

	hash_engine->resume(hace_dev);
}

static void aspeed_hace_register(struct aspeed_hace_dev *hace_dev)
{
#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_HASH
	aspeed_register_hace_hash_algs(hace_dev);
#endif
#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO
	aspeed_register_hace_crypto_algs(hace_dev);
#endif
}

static void aspeed_hace_unregister(struct aspeed_hace_dev *hace_dev)
{
#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_HASH
	aspeed_unregister_hace_hash_algs(hace_dev);
#endif
#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO
	aspeed_unregister_hace_crypto_algs(hace_dev);
#endif
}

static const struct of_device_id aspeed_hace_of_matches[] = {
	{ .compatible = "aspeed,ast2500-hace", .data = (void *)5, },
	{ .compatible = "aspeed,ast2600-hace", .data = (void *)6, },
	{},
};

static int aspeed_hace_probe(struct platform_device *pdev)
{
	struct aspeed_engine_crypto *crypto_engine;
	struct aspeed_engine_hash *hash_engine;
	struct aspeed_hace_dev *hace_dev;
	int rc;

	hace_dev = devm_kzalloc(&pdev->dev, sizeof(struct aspeed_hace_dev),
				GFP_KERNEL);
	if (!hace_dev)
		return -ENOMEM;

	hace_dev->version = (uintptr_t)device_get_match_data(&pdev->dev);
	if (!hace_dev->version) {
		dev_err(&pdev->dev, "Failed to match hace dev id\n");
		return -EINVAL;
	}

	hace_dev->dev = &pdev->dev;
	hash_engine = &hace_dev->hash_engine;
	crypto_engine = &hace_dev->crypto_engine;

	platform_set_drvdata(pdev, hace_dev);

	hace_dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, NULL);
	if (IS_ERR(hace_dev->regs))
		return PTR_ERR(hace_dev->regs);

	/* Get irq number and register it */
	hace_dev->irq = platform_get_irq(pdev, 0);
	if (hace_dev->irq < 0)
		return -ENXIO;

	rc = devm_request_irq(&pdev->dev, hace_dev->irq, aspeed_hace_irq, 0,
			      dev_name(&pdev->dev), hace_dev);
	if (rc) {
		dev_err(&pdev->dev, "Failed to request interrupt\n");
		return rc;
	}

	/* Get clk and enable it */
	hace_dev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(hace_dev->clk)) {
		dev_err(&pdev->dev, "Failed to get clk\n");
		return -ENODEV;
	}

	rc = clk_prepare_enable(hace_dev->clk);
	if (rc) {
		dev_err(&pdev->dev, "Failed to enable clock 0x%x\n", rc);
		return rc;
	}

	/* Initialize crypto hardware engine structure for hash */
	hace_dev->crypt_engine_hash = crypto_engine_alloc_init(hace_dev->dev,
							       true);
	if (!hace_dev->crypt_engine_hash) {
		rc = -ENOMEM;
		goto clk_exit;
	}

	rc = crypto_engine_start(hace_dev->crypt_engine_hash);
	if (rc)
		goto err_engine_hash_start;

	tasklet_init(&hash_engine->done_task, aspeed_hace_hash_done_task,
		     (unsigned long)hace_dev);

	/* Initialize crypto hardware engine structure for crypto */
	hace_dev->crypt_engine_crypto = crypto_engine_alloc_init(hace_dev->dev,
								 true);
	if (!hace_dev->crypt_engine_crypto) {
		rc = -ENOMEM;
		goto err_engine_hash_start;
	}

	rc = crypto_engine_start(hace_dev->crypt_engine_crypto);
	if (rc)
		goto err_engine_crypto_start;

	tasklet_init(&crypto_engine->done_task, aspeed_hace_crypto_done_task,
		     (unsigned long)hace_dev);

	/* Allocate DMA buffer for hash engine input used */
	hash_engine->ahash_src_addr =
		dmam_alloc_coherent(&pdev->dev,
				    ASPEED_HASH_SRC_DMA_BUF_LEN,
				    &hash_engine->ahash_src_dma_addr,
				    GFP_KERNEL);
	if (!hash_engine->ahash_src_addr) {
		dev_err(&pdev->dev, "Failed to allocate dma buffer\n");
		rc = -ENOMEM;
		goto err_engine_crypto_start;
	}

	/* Allocate DMA buffer for crypto engine context used */
	crypto_engine->cipher_ctx =
		dmam_alloc_coherent(&pdev->dev,
				    PAGE_SIZE,
				    &crypto_engine->cipher_ctx_dma,
				    GFP_KERNEL);
	if (!crypto_engine->cipher_ctx) {
		dev_err(&pdev->dev, "Failed to allocate cipher ctx dma\n");
		rc = -ENOMEM;
		goto err_engine_crypto_start;
	}

	/* Allocate DMA buffer for crypto engine input used */
	crypto_engine->cipher_addr =
		dmam_alloc_coherent(&pdev->dev,
				    ASPEED_CRYPTO_SRC_DMA_BUF_LEN,
				    &crypto_engine->cipher_dma_addr,
				    GFP_KERNEL);
	if (!crypto_engine->cipher_addr) {
		dev_err(&pdev->dev, "Failed to allocate cipher addr dma\n");
		rc = -ENOMEM;
		goto err_engine_crypto_start;
	}

	/* Allocate DMA buffer for crypto engine output used */
	if (hace_dev->version == AST2600_VERSION) {
		crypto_engine->dst_sg_addr =
			dmam_alloc_coherent(&pdev->dev,
					    ASPEED_CRYPTO_DST_DMA_BUF_LEN,
					    &crypto_engine->dst_sg_dma_addr,
					    GFP_KERNEL);
		if (!crypto_engine->dst_sg_addr) {
			dev_err(&pdev->dev, "Failed to allocate dst_sg dma\n");
			rc = -ENOMEM;
			goto err_engine_crypto_start;
		}
	}

	aspeed_hace_register(hace_dev);

	dev_info(&pdev->dev, "Aspeed Crypto Accelerator successfully registered\n");

	return 0;

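/*
 * Error path: each label below releases the resource acquired just before
 * the failing step and falls through to release the earlier ones. Buffers
 * obtained with dmam_alloc_coherent() are device-managed, so they need no
 * explicit cleanup here.
 */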
err_engine_crypto_start:
	crypto_engine_exit(hace_dev->crypt_engine_crypto);
err_engine_hash_start:
	crypto_engine_exit(hace_dev->crypt_engine_hash);
clk_exit:
	clk_disable_unprepare(hace_dev->clk);

	return rc;
}

static void aspeed_hace_remove(struct platform_device *pdev)
{
	struct aspeed_hace_dev *hace_dev = platform_get_drvdata(pdev);
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_engine_hash *hash_engine = &hace_dev->hash_engine;

	aspeed_hace_unregister(hace_dev);

	crypto_engine_exit(hace_dev->crypt_engine_hash);
	crypto_engine_exit(hace_dev->crypt_engine_crypto);

	tasklet_kill(&hash_engine->done_task);
	tasklet_kill(&crypto_engine->done_task);

	clk_disable_unprepare(hace_dev->clk);
}

MODULE_DEVICE_TABLE(of, aspeed_hace_of_matches);

static struct platform_driver aspeed_hace_driver = {
	.probe		= aspeed_hace_probe,
	.remove_new	= aspeed_hace_remove,
	.driver		= {
		.name		= KBUILD_MODNAME,
		.of_match_table	= aspeed_hace_of_matches,
	},
};

module_platform_driver(aspeed_hace_driver);

MODULE_AUTHOR("Neal Liu <neal_liu@aspeedtech.com>");
MODULE_DESCRIPTION("Aspeed HACE driver Crypto Accelerator");
MODULE_LICENSE("GPL");