/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This is the header file for the AMCC crypto offload Linux device driver
 * for use with the Linux CryptoAPI.
 */

#ifndef __CRYPTO4XX_CORE_H__
#define __CRYPTO4XX_CORE_H__

#include <linux/ratelimit.h>
#include <linux/mutex.h>
#include <linux/scatterlist.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_sa.h"

#define PPC460SX_SDR0_SRST		0x201
#define PPC405EX_SDR0_SRST		0x200
#define PPC460EX_SDR0_SRST		0x201
#define PPC460EX_CE_RESET		0x08000000
#define PPC460SX_CE_RESET		0x20000000
#define PPC405EX_CE_RESET		0x00000008

#define CRYPTO4XX_CRYPTO_PRIORITY	300
#define PPC4XX_NUM_PD			256
#define PPC4XX_LAST_PD			(PPC4XX_NUM_PD - 1)
#define PPC4XX_NUM_GD			1024
#define PPC4XX_LAST_GD			(PPC4XX_NUM_GD - 1)
#define PPC4XX_NUM_SD			256
#define PPC4XX_LAST_SD			(PPC4XX_NUM_SD - 1)
#define PPC4XX_SD_BUFFER_SIZE		2048

#define PD_ENTRY_BUSY			BIT(1)
#define PD_ENTRY_INUSE			BIT(0)
#define PD_ENTRY_FREE			0
#define ERING_WAS_FULL			0xffffffff

struct crypto4xx_device;

union shadow_sa_buf {
	struct dynamic_sa_ctl sa;

	/* alloc 256 bytes which is enough for any kind of dynamic sa */
	u8 buf[256];
} __packed;

struct pd_uinfo {
	struct crypto4xx_device *dev;
	u32 state;
	u32 first_gd;	/* first gather descriptor used by this packet */
	u32 num_gd;	/* number of gather descriptors used by this packet */
	u32 first_sd;	/* first scatter descriptor used by this packet */
	u32 num_sd;	/* number of scatter descriptors used by this packet */
	struct dynamic_sa_ctl *sa_va;	/* shadow sa */
	struct sa_state_record *sr_va;	/* state record for shadow sa */
	u32 sr_pa;
	struct scatterlist *dest_va;
	struct crypto_async_request *async_req;	/* base crypto request for this packet */
};

struct crypto4xx_device {
	struct crypto4xx_core_device *core_dev;
	void __iomem *ce_base;
	void __iomem *trng_base;

	struct ce_pd *pdr;	/* base address of packet descriptor ring */
	dma_addr_t pdr_pa;	/* physical address of pdr_base_register */
	struct ce_gd *gdr;	/* gather descriptor ring */
	dma_addr_t gdr_pa;	/* physical address of gdr_base_register */
	struct ce_sd *sdr;	/* scatter descriptor ring */
	dma_addr_t sdr_pa;	/* physical address of sdr_base_register */
	void *scatter_buffer_va;
	dma_addr_t scatter_buffer_pa;

	union shadow_sa_buf *shadow_sa_pool;
	dma_addr_t shadow_sa_pool_pa;
	struct sa_state_record *shadow_sr_pool;
	dma_addr_t shadow_sr_pool_pa;
	u32 pdr_tail;
	u32 pdr_head;
	u32 gdr_tail;
	u32 gdr_head;
	u32 sdr_tail;
	u32 sdr_head;
	struct pd_uinfo *pdr_uinfo;
	struct list_head alg_list;	/* list of algorithms supported by this device */
	struct ratelimit_state aead_ratelimit;
	bool is_revb;
};

struct crypto4xx_core_device {
	struct device *device;
	struct platform_device *ofdev;
	struct crypto4xx_device *dev;
	struct hwrng *trng;
	u32 int_status;
	u32 irq;
	struct tasklet_struct tasklet;
	spinlock_t lock;
	struct mutex rng_lock;
};
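
/*
 * Illustrative sketch only (not part of the original header): shows how the
 * pdr_head/pdr_tail indices and ERING_WAS_FULL above are typically used when
 * claiming the next packet descriptor slot. The helper name is hypothetical;
 * the driver's real ring management lives in crypto4xx_core.c and is done
 * under a lock, which this sketch omits.
 */
static inline u32 crypto4xx_example_claim_pd(struct crypto4xx_device *dev)
{
	u32 claimed = dev->pdr_head;
	u32 next = (claimed + 1) % PPC4XX_NUM_PD;

	/* The ring is full when advancing head would collide with tail. */
	if (next == dev->pdr_tail)
		return ERING_WAS_FULL;

	dev->pdr_head = next;
	return claimed;	/* index of the descriptor handed to the caller */
}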
struct crypto4xx_ctx {
	struct crypto4xx_device *dev;
	struct dynamic_sa_ctl *sa_in;
	struct dynamic_sa_ctl *sa_out;
	__le32 iv_nonce;
	u32 sa_len;
	union {
		struct crypto_sync_skcipher *cipher;
		struct crypto_aead *aead;
	} sw_cipher;
};

struct crypto4xx_aead_reqctx {
	struct scatterlist dst[2];
};

struct crypto4xx_alg_common {
	u32 type;
	union {
		struct skcipher_alg cipher;
		struct ahash_alg hash;
		struct aead_alg aead;
		struct rng_alg rng;
	} u;
};

struct crypto4xx_alg {
	struct list_head entry;
	struct crypto4xx_alg_common alg;
	struct crypto4xx_device *dev;
};

int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
int crypto4xx_build_pd(struct crypto_async_request *req,
		       struct crypto4xx_ctx *ctx,
		       struct scatterlist *src,
		       struct scatterlist *dst,
		       const unsigned int datalen,
		       const __le32 *iv, const u32 iv_len,
		       const struct dynamic_sa_ctl *sa,
		       const unsigned int sa_len,
		       const unsigned int assoclen,
		       struct scatterlist *dst_tmp);
int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_ctr(struct skcipher_request *req);
int crypto4xx_decrypt_ctr(struct skcipher_request *req);
int crypto4xx_encrypt_iv_stream(struct skcipher_request *req);
int crypto4xx_decrypt_iv_stream(struct skcipher_request *req);
int crypto4xx_encrypt_iv_block(struct skcipher_request *req);
int crypto4xx_decrypt_iv_block(struct skcipher_request *req);
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req);
int crypto4xx_decrypt_noiv_block(struct skcipher_request *req);
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req);
int crypto4xx_rfc3686_decrypt(struct skcipher_request *req);
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);
int crypto4xx_hash_digest(struct ahash_request *req);
int crypto4xx_hash_final(struct ahash_request *req);
int crypto4xx_hash_update(struct ahash_request *req);
int crypto4xx_hash_init(struct ahash_request *req);

/*
 * Note: Only use this function to copy items that are word aligned.
 */
static inline void crypto4xx_memcpy_swab32(u32 *dst, const void *buf,
					   size_t len)
{
	for (; len >= 4; buf += 4, len -= 4)
		*dst++ = __swab32p((u32 *) buf);

	if (len) {
		const u8 *tmp = (u8 *)buf;

		switch (len) {
		case 3:
			*dst = (tmp[2] << 16) |
			       (tmp[1] << 8) |
			       tmp[0];
			break;
		case 2:
			*dst = (tmp[1] << 8) |
			       tmp[0];
			break;
		case 1:
			*dst = tmp[0];
			break;
		default:
			break;
		}
	}
}

static inline void crypto4xx_memcpy_from_le32(u32 *dst, const void *buf,
					      size_t len)
{
	crypto4xx_memcpy_swab32(dst, buf, len);
}

static inline void crypto4xx_memcpy_to_le32(__le32 *dst, const void *buf,
					    size_t len)
{
	crypto4xx_memcpy_swab32((u32 *)dst, buf, len);
}
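
/*
 * Illustrative usage sketch only (not part of the original header): the
 * helpers above byte-reverse each aligned 4-byte group, which is how key and
 * IV material is converted between the CPU byte stream and the 32-bit
 * little-endian words the packet engine works with. The function and buffer
 * names below are hypothetical.
 */
static inline void crypto4xx_example_load_words(__le32 *sa_words,
						const u8 *key,
						unsigned int keylen)
{
	/*
	 * After the call, each aligned 4-byte group of 'key' appears in
	 * 'sa_words' with its bytes reversed; a trailing partial group is
	 * packed into the low-order bytes of the last destination word.
	 */
	crypto4xx_memcpy_to_le32(sa_words, key, keylen);
}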

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize);
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_aes_ccm(struct aead_request *req);
int crypto4xx_decrypt_aes_ccm(struct aead_request *req);
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen);
int crypto4xx_encrypt_aes_gcm(struct aead_request *req);
int crypto4xx_decrypt_aes_gcm(struct aead_request *req);

#endif