// SPDX-License-Identifier: GPL-2.0-or-later
/*
 *   Copyright (C) 2019 Samsung Electronics Co., Ltd.
 */

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/wait.h>
#include <linux/sched.h>

#include "glob.h"
#include "crypto_ctx.h"
15
16 struct crypto_ctx_list {
17 spinlock_t ctx_lock;
18 int avail_ctx;
19 struct list_head idle_ctx;
20 wait_queue_head_t ctx_wait;
21 };
22
23 static struct crypto_ctx_list ctx_list;
24
/*
 * Release an AEAD transform.  A NULL @aead is accepted and ignored so
 * callers can free slots unconditionally.
 */
static inline void free_aead(struct crypto_aead *aead)
{
	if (!aead)
		return;
	crypto_free_aead(aead);
}
30
free_shash(struct shash_desc * shash)31 static void free_shash(struct shash_desc *shash)
32 {
33 if (shash) {
34 crypto_free_shash(shash->tfm);
35 kfree(shash);
36 }
37 }
38
alloc_aead(int id)39 static struct crypto_aead *alloc_aead(int id)
40 {
41 struct crypto_aead *tfm = NULL;
42
43 switch (id) {
44 case CRYPTO_AEAD_AES_GCM:
45 tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
46 break;
47 case CRYPTO_AEAD_AES_CCM:
48 tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
49 break;
50 default:
51 pr_err("Does not support encrypt ahead(id : %d)\n", id);
52 return NULL;
53 }
54
55 if (IS_ERR(tfm)) {
56 pr_err("Failed to alloc encrypt aead : %ld\n", PTR_ERR(tfm));
57 return NULL;
58 }
59
60 return tfm;
61 }
62
alloc_shash_desc(int id)63 static struct shash_desc *alloc_shash_desc(int id)
64 {
65 struct crypto_shash *tfm = NULL;
66 struct shash_desc *shash;
67
68 switch (id) {
69 case CRYPTO_SHASH_CMACAES:
70 tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
71 break;
72 default:
73 return NULL;
74 }
75
76 if (IS_ERR(tfm))
77 return NULL;
78
79 shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm),
80 KSMBD_DEFAULT_GFP);
81 if (!shash)
82 crypto_free_shash(tfm);
83 else
84 shash->tfm = tfm;
85 return shash;
86 }
87
ctx_free(struct ksmbd_crypto_ctx * ctx)88 static void ctx_free(struct ksmbd_crypto_ctx *ctx)
89 {
90 int i;
91
92 for (i = 0; i < CRYPTO_SHASH_MAX; i++)
93 free_shash(ctx->desc[i]);
94 for (i = 0; i < CRYPTO_AEAD_MAX; i++)
95 free_aead(ctx->ccmaes[i]);
96 kfree(ctx);
97 }
98
ksmbd_find_crypto_ctx(void)99 static struct ksmbd_crypto_ctx *ksmbd_find_crypto_ctx(void)
100 {
101 struct ksmbd_crypto_ctx *ctx;
102
103 while (1) {
104 spin_lock(&ctx_list.ctx_lock);
105 if (!list_empty(&ctx_list.idle_ctx)) {
106 ctx = list_entry(ctx_list.idle_ctx.next,
107 struct ksmbd_crypto_ctx,
108 list);
109 list_del(&ctx->list);
110 spin_unlock(&ctx_list.ctx_lock);
111 return ctx;
112 }
113
114 if (ctx_list.avail_ctx > num_online_cpus()) {
115 spin_unlock(&ctx_list.ctx_lock);
116 wait_event(ctx_list.ctx_wait,
117 !list_empty(&ctx_list.idle_ctx));
118 continue;
119 }
120
121 ctx_list.avail_ctx++;
122 spin_unlock(&ctx_list.ctx_lock);
123
124 ctx = kzalloc_obj(struct ksmbd_crypto_ctx, KSMBD_DEFAULT_GFP);
125 if (!ctx) {
126 spin_lock(&ctx_list.ctx_lock);
127 ctx_list.avail_ctx--;
128 spin_unlock(&ctx_list.ctx_lock);
129 wait_event(ctx_list.ctx_wait,
130 !list_empty(&ctx_list.idle_ctx));
131 continue;
132 }
133 break;
134 }
135 return ctx;
136 }
137
ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx * ctx)138 void ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx *ctx)
139 {
140 if (!ctx)
141 return;
142
143 spin_lock(&ctx_list.ctx_lock);
144 if (ctx_list.avail_ctx <= num_online_cpus()) {
145 list_add(&ctx->list, &ctx_list.idle_ctx);
146 spin_unlock(&ctx_list.ctx_lock);
147 wake_up(&ctx_list.ctx_wait);
148 return;
149 }
150
151 ctx_list.avail_ctx--;
152 spin_unlock(&ctx_list.ctx_lock);
153 ctx_free(ctx);
154 }
155
____crypto_shash_ctx_find(int id)156 static struct ksmbd_crypto_ctx *____crypto_shash_ctx_find(int id)
157 {
158 struct ksmbd_crypto_ctx *ctx;
159
160 if (id >= CRYPTO_SHASH_MAX)
161 return NULL;
162
163 ctx = ksmbd_find_crypto_ctx();
164 if (ctx->desc[id])
165 return ctx;
166
167 ctx->desc[id] = alloc_shash_desc(id);
168 if (ctx->desc[id])
169 return ctx;
170 ksmbd_release_crypto_ctx(ctx);
171 return NULL;
172 }
173
ksmbd_crypto_ctx_find_cmacaes(void)174 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_cmacaes(void)
175 {
176 return ____crypto_shash_ctx_find(CRYPTO_SHASH_CMACAES);
177 }
178
____crypto_aead_ctx_find(int id)179 static struct ksmbd_crypto_ctx *____crypto_aead_ctx_find(int id)
180 {
181 struct ksmbd_crypto_ctx *ctx;
182
183 if (id >= CRYPTO_AEAD_MAX)
184 return NULL;
185
186 ctx = ksmbd_find_crypto_ctx();
187 if (ctx->ccmaes[id])
188 return ctx;
189
190 ctx->ccmaes[id] = alloc_aead(id);
191 if (ctx->ccmaes[id])
192 return ctx;
193 ksmbd_release_crypto_ctx(ctx);
194 return NULL;
195 }
196
ksmbd_crypto_ctx_find_gcm(void)197 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_gcm(void)
198 {
199 return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_GCM);
200 }
201
ksmbd_crypto_ctx_find_ccm(void)202 struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_ccm(void)
203 {
204 return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_CCM);
205 }
206
ksmbd_crypto_destroy(void)207 void ksmbd_crypto_destroy(void)
208 {
209 struct ksmbd_crypto_ctx *ctx;
210
211 while (!list_empty(&ctx_list.idle_ctx)) {
212 ctx = list_entry(ctx_list.idle_ctx.next,
213 struct ksmbd_crypto_ctx,
214 list);
215 list_del(&ctx->list);
216 ctx_free(ctx);
217 }
218 }
219
ksmbd_crypto_create(void)220 int ksmbd_crypto_create(void)
221 {
222 struct ksmbd_crypto_ctx *ctx;
223
224 spin_lock_init(&ctx_list.ctx_lock);
225 INIT_LIST_HEAD(&ctx_list.idle_ctx);
226 init_waitqueue_head(&ctx_list.ctx_wait);
227 ctx_list.avail_ctx = 1;
228
229 ctx = kzalloc_obj(struct ksmbd_crypto_ctx, KSMBD_DEFAULT_GFP);
230 if (!ctx)
231 return -ENOMEM;
232 list_add(&ctx->list, &ctx_list.idle_ctx);
233 return 0;
234 }
235