xref: /titanic_41/usr/src/uts/common/crypto/api/kcf_dual.c (revision 9b009fc1b553084f6003dcd46b171890049de0ff)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23  */
24 
25 #include <sys/errno.h>
26 #include <sys/types.h>
27 #include <sys/kmem.h>
28 #include <sys/sysmacros.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/impl.h>
31 #include <sys/crypto/api.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/sched_impl.h>
34 
35 #define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
36 #define	CRYPTO_CIPHER_MAC_OFFSET(f) offsetof(crypto_dual_cipher_mac_ops_t, f)
37 
38 static int crypto_mac_decrypt_common(crypto_mechanism_t *,
39     crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
40     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
41     crypto_data_t *, crypto_call_req_t *, boolean_t);
42 
43 static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
44     crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
45     crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
46     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
47     crypto_data_t *, crypto_call_req_t *, boolean_t);
48 
49 int
50 crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
51     crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
52     crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
53     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
54     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
55 {
56 	/*
57 	 * First try to find a provider for the encryption mechanism that
58 	 * is also capable of the MAC mechanism.
59 	 */
60 	int rv;
61 	kcf_mech_entry_t *me;
62 	kcf_provider_desc_t *pd = provider;
63 	kcf_provider_desc_t *real_provider = pd;
64 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
65 	kcf_req_params_t params;
66 	kcf_encrypt_mac_ops_params_t *cmops;
67 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
68 
69 	ASSERT(KCF_PROV_REFHELD(pd));
70 
71 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
72 		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
73 		    mac_mech->cm_type, mac_key, pd, &real_provider,
74 		    CRYPTO_FG_ENCRYPT_MAC_ATOMIC);
75 
76 		if (rv != CRYPTO_SUCCESS)
77 			return (rv);
78 	}
79 
80 	/*
81 	 * For SW providers, check the validity of the context template.
82 	 * It is very rare that the generation number mismatches, so it
83 	 * is acceptable to fail here and let the consumer recover by
84 	 * freeing this tmpl and creating a new one for the key and the
85 	 * new SW provider.
86 	 * Warning! This will need to change when multiple software
87 	 * providers per mechanism are supported.
88 	 */
89 
90 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
91 		if (encr_tmpl != NULL) {
92 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
93 			    KCF_SUCCESS) {
94 				rv = CRYPTO_MECHANISM_INVALID;
95 				goto out;
96 			}
97 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
98 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
99 				rv = CRYPTO_OLD_CTX_TEMPLATE;
100 				goto out;
101 			}
102 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
103 		}
104 
105 		if (mac_tmpl != NULL) {
106 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
107 			    KCF_SUCCESS) {
108 				rv = CRYPTO_MECHANISM_INVALID;
109 				goto out;
110 			}
111 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
112 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
113 				rv = CRYPTO_OLD_CTX_TEMPLATE;
114 				goto out;
115 			}
116 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
117 		}
118 	}
119 
120 	/* The fast path for SW providers. */
121 	if (CHECK_FASTPATH(crq, real_provider)) {
122 		crypto_mechanism_t lencr_mech;
123 		crypto_mechanism_t lmac_mech;
124 
125 		/* careful! struct assignments */
126 		lencr_mech = *encr_mech;
127 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
128 		    &lencr_mech);
129 
130 		lmac_mech = *mac_mech;
131 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
132 		    &lmac_mech);
133 
134 		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
135 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
136 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
137 
138 		KCF_PROV_INCRSTATS(pd, rv);
139 	} else {
140 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
141 		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
142 		    spi_mac_tmpl);
143 
144 		cmops = &(params.rp_u.encrypt_mac_params);
145 
146 		/* careful! struct assignments */
147 		cmops->em_encr_mech = *encr_mech;
148 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
149 		    &cmops->em_encr_mech);
150 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
151 
152 		cmops->em_mac_mech = *mac_mech;
153 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
154 		    &cmops->em_mac_mech);
155 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
156 
157 		rv = kcf_submit_request(real_provider, NULL, crq, &params,
158 		    B_FALSE);
159 	}
160 
161 out:
162 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
163 		KCF_PROV_REFRELE(real_provider);
164 	return (rv);
165 }
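
/*
 * Illustrative sketch, not part of kcf_dual.c: one way a kernel consumer
 * might call the provider-specified atomic entry point above.  The name
 * example_encrypt_then_mac_sync() is hypothetical, the templates are
 * omitted (NULL), and a NULL crypto_call_req_t selects synchronous
 * completion; setting up pt, ct and mac per sys/crypto/common.h is
 * assumed and not shown.
 */
static int
example_encrypt_then_mac_sync(crypto_provider_t prov, crypto_session_id_t sid,
    crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
    crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_data_t *pt, crypto_dual_data_t *ct, crypto_data_t *mac)
{
	/* Encrypt pt into ct and MAC the designated ct region in one call. */
	return (crypto_encrypt_mac_prov(prov, sid, encr_mech, mac_mech,
	    pt, encr_key, mac_key, NULL, NULL, ct, mac, NULL));
}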
166 
167 /*
168  * Performs a dual encrypt/mac atomic operation. The provider and session
169  * to use are determined by the KCF dispatcher.
170  */
171 int
172 crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
173     crypto_mechanism_t *mac_mech, crypto_data_t *pt,
174     crypto_key_t *encr_key, crypto_key_t *mac_key,
175     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
176     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
177 {
178 	/*
179 	 * First try to find a provider for the encryption mechanism that
180 	 * is also capable of the MAC mechanism.
181 	 */
182 	int error;
183 	kcf_mech_entry_t *me;
184 	kcf_provider_desc_t *pd;
185 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
186 	kcf_req_params_t params;
187 	kcf_encrypt_mac_ops_params_t *cmops;
188 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
189 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
190 	kcf_prov_tried_t *list = NULL;
191 	boolean_t encr_tmpl_checked = B_FALSE;
192 	boolean_t mac_tmpl_checked = B_FALSE;
193 	kcf_dual_req_t *next_req = NULL;
194 
195 retry:
196 	/* pd is returned held on success */
197 	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
198 	    &me, &prov_encr_mechid,
199 	    &prov_mac_mechid, &error, list,
200 	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
201 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
202 	    ct->dd_len1);
203 	if (pd == NULL) {
204 		if (list != NULL)
205 			kcf_free_triedlist(list);
206 		if (next_req != NULL)
207 			kmem_free(next_req, sizeof (kcf_dual_req_t));
208 		return (error);
209 	}
210 
211 	/*
212 	 * For SW providers, check the validity of the context template.
213 	 * It is very rare that the generation number mismatches, so it
214 	 * is acceptable to fail here and let the consumer recover by
215 	 * freeing this tmpl and creating a new one for the key and the
216 	 * new SW provider.
217 	 * Warning! This will need to change when multiple software
218 	 * providers per mechanism are supported.
219 	 */
220 
221 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
222 		if (encr_tmpl != NULL) {
223 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
224 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
225 
226 				if (next_req != NULL)
227 					kmem_free(next_req,
228 					    sizeof (kcf_dual_req_t));
229 				if (list != NULL)
230 					kcf_free_triedlist(list);
231 
232 				KCF_PROV_REFRELE(pd);
233 				/* Which one is the old one? */
234 				return (CRYPTO_OLD_CTX_TEMPLATE);
235 			}
236 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
237 		}
238 		encr_tmpl_checked = B_TRUE;
239 	}
240 
241 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
242 		crypto_call_req_t encr_req;
243 
244 		/* Need to emulate with 2 internal calls */
245 		/* Allocate and initialize the MAC req for the callback */
246 
247 		if (crq != NULL) {
248 			if (next_req == NULL) {
249 				next_req = kcf_alloc_req(crq);
250 
251 				if (next_req == NULL) {
252 					KCF_PROV_REFRELE(pd);
253 					if (list != NULL)
254 						kcf_free_triedlist(list);
255 					return (CRYPTO_HOST_MEMORY);
256 				}
257 				/*
258 				 * Careful! we're wrapping-in mac_tmpl instead
259 				 * of an spi_mac_tmpl. The callback routine will
260 				 * have to validate mac_tmpl, and use the
261 				 * mac_ctx_tmpl, once it picks a MAC provider.
262 				 */
263 				KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
264 				    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
265 				    (crypto_data_t *)ct, mac, mac_tmpl);
266 			}
267 
268 			encr_req.cr_flag = crq->cr_flag;
269 			encr_req.cr_callback_func = kcf_next_req;
270 			encr_req.cr_callback_arg = next_req;
271 		}
272 
273 		if (pt == NULL) {
274 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
275 			    pd->pd_sid, encr_mech, encr_key,
276 			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
277 		} else {
278 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
279 			    pd->pd_sid, encr_mech, encr_key, pt,
280 			    (crypto_data_t *)ct, spi_encr_tmpl);
281 		}
282 
283 		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
284 		    &encr_req, &params, B_TRUE);
285 
286 		switch (error) {
287 		case CRYPTO_SUCCESS: {
288 			off_t saveoffset;
289 			size_t savelen;
290 
291 			/*
292 			 * The encryption step is done. Reuse the encr_req
293 			 * for submitting the MAC step.
294 			 */
295 			if (next_req == NULL) {
296 				saveoffset = ct->dd_offset1;
297 				savelen = ct->dd_len1;
298 			} else {
299 				saveoffset = next_req->kr_saveoffset =
300 				    ct->dd_offset1;
301 				savelen = next_req->kr_savelen = ct->dd_len1;
302 				encr_req.cr_callback_func = kcf_last_req;
303 			}
304 
305 			ct->dd_offset1 = ct->dd_offset2;
306 			ct->dd_len1 = ct->dd_len2;
307 
308 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
309 			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
310 			    &encr_req);
311 
312 			if (error != CRYPTO_QUEUED) {
313 				ct->dd_offset1 = saveoffset;
314 				ct->dd_len1 = savelen;
315 			}
316 			break;
317 		}
318 
319 		case CRYPTO_QUEUED:
320 			if ((crq != NULL) &&
321 			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
322 				crq->cr_reqid = encr_req.cr_reqid;
323 			break;
324 
325 		default:
326 
327 			/* Add pd to the linked list of providers tried. */
328 			if (IS_RECOVERABLE(error)) {
329 				if (kcf_insert_triedlist(&list, pd,
330 				    KCF_KMFLAG(crq)) != NULL)
331 					goto retry;
332 			}
333 		}
334 		if (error != CRYPTO_QUEUED && next_req != NULL)
335 			kmem_free(next_req, sizeof (kcf_dual_req_t));
336 		if (list != NULL)
337 			kcf_free_triedlist(list);
338 		KCF_PROV_REFRELE(pd);
339 		return (error);
340 	}
341 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
342 		if ((mac_tmpl != NULL) &&
343 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
344 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
345 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
346 
347 				if (next_req != NULL)
348 					kmem_free(next_req,
349 					    sizeof (kcf_dual_req_t));
350 				if (list != NULL)
351 					kcf_free_triedlist(list);
352 
353 				KCF_PROV_REFRELE(pd);
354 				/* Which one is the old one? */
355 				return (CRYPTO_OLD_CTX_TEMPLATE);
356 			}
357 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
358 		}
359 		mac_tmpl_checked = B_TRUE;
360 	}
361 
362 	/* The fast path for SW providers. */
363 	if (CHECK_FASTPATH(crq, pd)) {
364 		crypto_mechanism_t lencr_mech;
365 		crypto_mechanism_t lmac_mech;
366 
367 		/* careful! struct assignments */
368 		lencr_mech = *encr_mech;
369 		lencr_mech.cm_type = prov_encr_mechid;
370 		lmac_mech = *mac_mech;
371 		lmac_mech.cm_type = prov_mac_mechid;
372 
373 		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
374 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
375 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
376 
377 		KCF_PROV_INCRSTATS(pd, error);
378 	} else {
379 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
380 		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
381 		    spi_mac_tmpl);
382 
383 		cmops = &(params.rp_u.encrypt_mac_params);
384 
385 		/* careful! struct assignments */
386 		cmops->em_encr_mech = *encr_mech;
387 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
388 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
389 		cmops->em_mac_mech = *mac_mech;
390 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
391 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
392 
393 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
394 	}
395 
396 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
397 	    IS_RECOVERABLE(error)) {
398 		/* Add pd to the linked list of providers tried. */
399 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
400 			goto retry;
401 	}
402 
403 	if (next_req != NULL)
404 		kmem_free(next_req, sizeof (kcf_dual_req_t));
405 
406 	if (list != NULL)
407 		kcf_free_triedlist(list);
408 
409 	KCF_PROV_REFRELE(pd);
410 	return (error);
411 }
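
/*
 * Illustrative sketch, not part of kcf_dual.c: an asynchronous call to the
 * dispatcher-selected atomic operation above.  example_em_done() and the
 * arg cookie are hypothetical consumer code; the crypto_call_req_t fields
 * shown are the ones this file itself manipulates (cr_flag,
 * cr_callback_func, cr_callback_arg).
 */
/* ARGSUSED */
static void
example_em_done(void *arg, int status)
{
	/* hypothetical completion handling: wake up or requeue the caller */
}

static int
example_encrypt_mac_async(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_data_t *pt, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_dual_data_t *ct, crypto_data_t *mac,
    void *arg)
{
	crypto_call_req_t req;

	req.cr_flag = 0;
	req.cr_callback_func = example_em_done;
	req.cr_callback_arg = arg;

	/* CRYPTO_QUEUED means the result will arrive via example_em_done(). */
	return (crypto_encrypt_mac(encr_mech, mac_mech, pt, encr_key,
	    mac_key, NULL, NULL, ct, mac, &req));
}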
412 
413 int
414 crypto_encrypt_mac_init_prov(crypto_provider_t provider,
415     crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
416     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
417     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
418     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
419     crypto_call_req_t *cr)
420 {
421 	/*
422 	 * First try to find a provider for the encryption mechanism that
423 	 * is also capable of the MAC mechanism.
424 	 */
425 	int rv;
426 	kcf_mech_entry_t *me;
427 	kcf_provider_desc_t *pd = provider;
428 	kcf_provider_desc_t *real_provider = pd;
429 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
430 	kcf_req_params_t params;
431 	kcf_encrypt_mac_ops_params_t *cmops;
432 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
433 	crypto_ctx_t *ctx;
434 	kcf_context_t *encr_kcf_context = NULL;
435 
436 	ASSERT(KCF_PROV_REFHELD(pd));
437 
438 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
439 		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
440 		    mac_mech->cm_type, mac_key, pd, &real_provider,
441 		    CRYPTO_FG_ENCRYPT_MAC);
442 
443 		if (rv != CRYPTO_SUCCESS)
444 			return (rv);
445 	}
446 
447 	/*
448 	 * For SW providers, check the validity of the context template.
449 	 * It is very rare that the generation number mismatches, so it
450 	 * is acceptable to fail here and let the consumer recover by
451 	 * freeing this tmpl and creating a new one for the key and the
452 	 * new SW provider.
453 	 * Warning! This will need to change when multiple software
454 	 * providers per mechanism are supported.
455 	 */
456 
457 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
458 		if (encr_tmpl != NULL) {
459 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
460 			    KCF_SUCCESS) {
461 				rv = CRYPTO_MECHANISM_INVALID;
462 				goto out;
463 			}
464 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
465 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
466 				rv = CRYPTO_OLD_CTX_TEMPLATE;
467 				goto out;
468 			}
469 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
470 		}
471 
472 		if (mac_tmpl != NULL) {
473 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
474 			    KCF_SUCCESS) {
475 				rv = CRYPTO_MECHANISM_INVALID;
476 				goto out;
477 			}
478 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
479 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
480 				rv = CRYPTO_OLD_CTX_TEMPLATE;
481 				goto out;
482 			}
483 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
484 		}
485 	}
486 
487 	ctx = kcf_new_ctx(cr, real_provider, sid);
488 	if (ctx == NULL) {
489 		rv = CRYPTO_HOST_MEMORY;
490 		goto out;
491 	}
492 	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
493 
494 	/* The fast path for SW providers. */
495 	if (CHECK_FASTPATH(cr, real_provider)) {
496 		crypto_mechanism_t lencr_mech;
497 		crypto_mechanism_t lmac_mech;
498 
499 		/* careful! struct assignments */
500 		lencr_mech = *encr_mech;
501 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
502 		    &lencr_mech);
503 
504 		lmac_mech = *mac_mech;
505 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
506 		    &lmac_mech);
507 
508 		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
509 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
510 		    KCF_SWFP_RHNDL(cr));
511 
512 		KCF_PROV_INCRSTATS(pd, rv);
513 	} else {
514 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
515 		    sid, encr_key, mac_key, NULL, NULL, NULL,
516 		    spi_encr_tmpl, spi_mac_tmpl);
517 
518 		cmops = &(params.rp_u.encrypt_mac_params);
519 
520 		/* careful! struct assignments */
521 		cmops->em_encr_mech = *encr_mech;
522 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
523 		    &cmops->em_encr_mech);
524 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
525 
526 		cmops->em_mac_mech = *mac_mech;
527 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
528 		    &cmops->em_mac_mech);
529 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
530 
531 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
532 		    B_FALSE);
533 	}
534 
535 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
536 		KCF_CONTEXT_REFRELE(encr_kcf_context);
537 	} else
538 		*ctxp = (crypto_context_t)ctx;
539 
540 out:
541 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
542 		KCF_PROV_REFRELE(real_provider);
543 	return (rv);
544 }
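
/*
 * Illustrative sketch, not part of kcf_dual.c: the recovery path that the
 * context-template comments above expect from a consumer when a cached
 * template has gone stale.  It assumes the crypto_create_ctx_template(9F)
 * and crypto_destroy_ctx_template(9F) interfaces are used to rebuild the
 * template; the function name and single-template handling are
 * hypothetical.
 */
static int
example_init_with_tmpl_recovery(crypto_provider_t prov,
    crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t *encr_tmplp,
    crypto_context_t *ctxp)
{
	int rv;

	rv = crypto_encrypt_mac_init_prov(prov, sid, encr_mech, mac_mech,
	    encr_key, mac_key, *encr_tmplp, NULL, ctxp, NULL);

	if (rv == CRYPTO_OLD_CTX_TEMPLATE) {
		/* The cached template was built for an older SW provider. */
		crypto_destroy_ctx_template(*encr_tmplp);
		rv = crypto_create_ctx_template(encr_mech, encr_key,
		    encr_tmplp, KM_SLEEP);
		if (rv != CRYPTO_SUCCESS)
			return (rv);
		rv = crypto_encrypt_mac_init_prov(prov, sid, encr_mech,
		    mac_mech, encr_key, mac_key, *encr_tmplp, NULL,
		    ctxp, NULL);
	}
	return (rv);
}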
545 
546 /*
547  * Starts a multi-part dual encrypt/mac operation. The provider and session
548  * to use are determined by the KCF dispatcher.
549  */
550 /* ARGSUSED */
551 int
552 crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
553     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
554     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
555     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
556     crypto_call_req_t *cr)
557 {
558 	/*
559 	 * First try to find a provider for the encryption mechanism that
560 	 * is also capable of the MAC mechanism.
561 	 */
562 	int error;
563 	kcf_mech_entry_t *me;
564 	kcf_provider_desc_t *pd;
565 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
566 	kcf_req_params_t params;
567 	kcf_encrypt_mac_ops_params_t *cmops;
568 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
569 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
570 	kcf_prov_tried_t *list = NULL;
571 	boolean_t encr_tmpl_checked = B_FALSE;
572 	boolean_t mac_tmpl_checked = B_FALSE;
573 	crypto_ctx_t *ctx = NULL;
574 	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
575 	crypto_call_flag_t save_flag;
576 
577 retry:
578 	/* pd is returned held on success */
579 	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
580 	    &me, &prov_encr_mechid,
581 	    &prov_mac_mechid, &error, list,
582 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC, 0);
583 	if (pd == NULL) {
584 		if (list != NULL)
585 			kcf_free_triedlist(list);
586 		return (error);
587 	}
588 
589 	/*
590 	 * For SW providers, check the validity of the context template.
591 	 * It is very rare that the generation number mismatches, so it
592 	 * is acceptable to fail here and let the consumer recover by
593 	 * freeing this tmpl and creating a new one for the key and the
594 	 * new SW provider.
595 	 * Warning! This will need to change when multiple software
596 	 * providers per mechanism are supported.
597 	 */
598 
599 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
600 		if (encr_tmpl != NULL) {
601 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
602 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
603 
604 				if (list != NULL)
605 					kcf_free_triedlist(list);
606 				if (encr_kcf_context != NULL)
607 					KCF_CONTEXT_REFRELE(encr_kcf_context);
608 
609 				KCF_PROV_REFRELE(pd);
610 				/* Which one is the old one? */
611 				return (CRYPTO_OLD_CTX_TEMPLATE);
612 			}
613 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
614 		}
615 		encr_tmpl_checked = B_TRUE;
616 	}
617 
618 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
619 		/* Need to emulate with 2 internal calls */
620 
621 		/*
622 		 * We avoid code complexity by limiting the pure async.
623 		 * case to be done using only a SW provider.
624 		 * XXX - Redo the emulation code below so that we can
625 		 * remove this limitation.
626 		 */
627 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
628 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
629 			    != NULL))
630 				goto retry;
631 			if (list != NULL)
632 				kcf_free_triedlist(list);
633 			if (encr_kcf_context != NULL)
634 				KCF_CONTEXT_REFRELE(encr_kcf_context);
635 			KCF_PROV_REFRELE(pd);
636 			return (CRYPTO_HOST_MEMORY);
637 		}
638 
639 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
640 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
641 			if (ctx == NULL) {
642 				if (list != NULL)
643 					kcf_free_triedlist(list);
644 				if (encr_kcf_context != NULL)
645 					KCF_CONTEXT_REFRELE(encr_kcf_context);
646 				KCF_PROV_REFRELE(pd);
647 				return (CRYPTO_HOST_MEMORY);
648 			}
649 			encr_kcf_context = (kcf_context_t *)
650 			    ctx->cc_framework_private;
651 		}
652 		/*
653 		 * Trade-off speed vs avoidance of code complexity and
654 		 * duplication:
655 		 * Could do all the combinations of fastpath / synch / asynch
656 		 * for the encryption and the mac steps. Early attempts
657 		 * showed the code grew wild and bug-prone, for little gain.
658 		 * Therefore, the adaptive asynch case is not implemented.
659 		 * It's either pure synchronous, or pure asynchronous.
660 		 * We still preserve a fastpath for the pure synchronous
661 		 * requests to SW providers.
662 		 */
663 		if (cr == NULL) {
664 			crypto_context_t mac_context;
665 
666 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
667 				crypto_mechanism_t lmech = *encr_mech;
668 
669 				lmech.cm_type = prov_encr_mechid;
670 
671 				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
672 				    encr_key, spi_encr_tmpl,
673 				    KCF_RHNDL(KM_SLEEP));
674 			} else {
675 				/*
676 				 * If we did the 'goto retry' then ctx may not
677 				 * be NULL.  In general, we can't reuse another
678 				 * provider's context, so we free it now so
679 				 * we don't leak it.
680 				 */
681 				if (ctx != NULL) {
682 					KCF_CONTEXT_REFRELE((kcf_context_t *)
683 					    ctx->cc_framework_private);
684 					encr_kcf_context = NULL;
685 				}
686 				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
687 				    encr_mech, encr_key, &encr_tmpl,
688 				    (crypto_context_t *)&ctx, NULL);
689 
690 				if (error == CRYPTO_SUCCESS) {
691 					encr_kcf_context = (kcf_context_t *)
692 					    ctx->cc_framework_private;
693 				}
694 			}
695 			KCF_PROV_INCRSTATS(pd, error);
696 
697 			KCF_PROV_REFRELE(pd);
698 
699 			if (error != CRYPTO_SUCCESS) {
700 				/* Can't be CRYPTO_QUEUED. return the failure */
701 				if (list != NULL)
702 					kcf_free_triedlist(list);
703 				if (encr_kcf_context != NULL)
704 					KCF_CONTEXT_REFRELE(encr_kcf_context);
705 
706 				return (error);
707 			}
708 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
709 			    &mac_context, NULL);
710 
711 			if (list != NULL)
712 				kcf_free_triedlist(list);
713 
714 			if (error != CRYPTO_SUCCESS) {
715 				/* Should this be an ASSERT() ? */
716 
717 				KCF_CONTEXT_REFRELE(encr_kcf_context);
718 			} else {
719 				encr_kcf_context = (kcf_context_t *)
720 				    ctx->cc_framework_private;
721 				mac_kcf_context = (kcf_context_t *)
722 				    ((crypto_ctx_t *)mac_context)->
723 				    cc_framework_private;
724 
725 				encr_kcf_context->kc_secondctx =
726 				    mac_kcf_context;
727 				KCF_CONTEXT_REFHOLD(mac_kcf_context);
728 
729 				*ctxp = (crypto_context_t)ctx;
730 			}
731 
732 			return (error);
733 		}
734 
735 		/* submit a pure asynchronous request. */
736 		save_flag = cr->cr_flag;
737 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
738 
739 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
740 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
741 		    spi_encr_tmpl, spi_mac_tmpl);
742 
743 		cmops = &(params.rp_u.encrypt_mac_params);
744 
745 		/* careful! struct assignments */
746 		cmops->em_encr_mech = *encr_mech;
747 		/*
748 		 * cmops->em_encr_mech.cm_type will be set when we get to
749 		 * kcf_emulate_dual() routine.
750 		 */
751 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
752 		cmops->em_mac_mech = *mac_mech;
753 
754 		/*
755 		 * cmops->em_mac_mech.cm_type will be set when we know the
756 		 * MAC provider.
757 		 */
758 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
759 
760 		/*
761 		 * non-NULL ctx->kc_secondctx tells common_submit_request
762 		 * that this request uses separate cipher and MAC contexts.
763 		 * That function will set ctx->kc_secondctx to the new
764 		 * MAC context, once it gets one.
765 		 */
766 		encr_kcf_context->kc_secondctx = encr_kcf_context;
767 
768 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
769 
770 		cr->cr_flag = save_flag;
771 
772 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
773 			KCF_CONTEXT_REFRELE(encr_kcf_context);
774 		}
775 		if (list != NULL)
776 			kcf_free_triedlist(list);
777 		*ctxp = (crypto_context_t)ctx;
778 		KCF_PROV_REFRELE(pd);
779 		return (error);
780 	}
781 
782 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
783 		if ((mac_tmpl != NULL) &&
784 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
785 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
786 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
787 
788 				if (list != NULL)
789 					kcf_free_triedlist(list);
790 
791 				KCF_PROV_REFRELE(pd);
792 				/* Which one is the old one? */
793 				return (CRYPTO_OLD_CTX_TEMPLATE);
794 			}
795 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
796 		}
797 		mac_tmpl_checked = B_TRUE;
798 	}
799 
800 	if (ctx == NULL) {
801 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
802 		if (ctx == NULL) {
803 			if (list != NULL)
804 				kcf_free_triedlist(list);
805 
806 			KCF_PROV_REFRELE(pd);
807 			return (CRYPTO_HOST_MEMORY);
808 		}
809 		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
810 	}
811 
812 	/* The fast path for SW providers. */
813 	if (CHECK_FASTPATH(cr, pd)) {
814 		crypto_mechanism_t lencr_mech;
815 		crypto_mechanism_t lmac_mech;
816 
817 		/* careful! struct assignments */
818 		lencr_mech = *encr_mech;
819 		lencr_mech.cm_type = prov_encr_mechid;
820 		lmac_mech = *mac_mech;
821 		lmac_mech.cm_type = prov_mac_mechid;
822 
823 		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
824 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
825 		    KCF_SWFP_RHNDL(cr));
826 
827 		KCF_PROV_INCRSTATS(pd, error);
828 	} else {
829 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
830 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
831 		    spi_encr_tmpl, spi_mac_tmpl);
832 
833 		cmops = &(params.rp_u.encrypt_mac_params);
834 
835 		/* careful! struct assignments */
836 		cmops->em_encr_mech = *encr_mech;
837 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
838 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
839 		cmops->em_mac_mech = *mac_mech;
840 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
841 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
842 
843 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
844 	}
845 
846 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
847 		if ((IS_RECOVERABLE(error)) &&
848 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
849 			goto retry;
850 
851 		KCF_CONTEXT_REFRELE(encr_kcf_context);
852 	} else
853 		*ctxp = (crypto_context_t)ctx;
854 
855 	if (list != NULL)
856 		kcf_free_triedlist(list);
857 
858 	KCF_PROV_REFRELE(pd);
859 	return (error);
860 }
861 
862 /*
863  * Continues a multi-part dual encrypt/mac operation.
864  */
865 /* ARGSUSED */
866 int
867 crypto_encrypt_mac_update(crypto_context_t context,
868     crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
869 {
870 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
871 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
872 	kcf_provider_desc_t *pd;
873 	int error;
874 	kcf_req_params_t params;
875 
876 	if ((ctx == NULL) ||
877 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
878 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
879 		return (CRYPTO_INVALID_CONTEXT);
880 	}
881 
882 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
883 
884 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
885 		off_t save_offset;
886 		size_t save_len;
887 		crypto_call_flag_t save_flag;
888 
889 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
890 			error = CRYPTO_INVALID_CONTEXT;
891 			goto out;
892 		}
893 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
894 
895 		/* First we submit the encryption request */
896 		if (cr == NULL) {
897 			/*
898 			 * 'ct' is never NULL.
899 			 * A NULL 'pt' means in-place.
900 			 */
901 			if (pt == NULL)
902 				error = crypto_encrypt_update(context,
903 				    (crypto_data_t *)ct, NULL, NULL);
904 			else
905 				error = crypto_encrypt_update(context, pt,
906 				    (crypto_data_t *)ct, NULL);
907 
908 			if (error != CRYPTO_SUCCESS)
909 				goto out;
910 
911 			/*
912 			 * Call mac_update when there is data to throw into
913 			 * the mix: either an explicitly non-zero ct->dd_len2,
914 			 * or the last ciphertext portion.
915 			 */
916 			save_offset = ct->dd_offset1;
917 			save_len = ct->dd_len1;
918 			if (ct->dd_len2 == 0) {
919 				/*
920 				 * The previous encrypt step was an
921 				 * accumulation only and didn't produce any
922 				 * partial output
923 				 */
924 				if (ct->dd_len1 == 0)
925 					goto out;
926 			} else {
927 				ct->dd_offset1 = ct->dd_offset2;
928 				ct->dd_len1 = ct->dd_len2;
929 			}
930 			error = crypto_mac_update((crypto_context_t)mac_ctx,
931 			    (crypto_data_t *)ct, NULL);
932 
933 			ct->dd_offset1 = save_offset;
934 			ct->dd_len1 = save_len;
935 
936 			goto out;
937 		}
938 		/* submit a pure asynchronous request. */
939 		save_flag = cr->cr_flag;
940 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
941 
942 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
943 		    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL)
944 
945 
946 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
947 
948 		cr->cr_flag = save_flag;
949 		goto out;
950 	}
951 
952 	/* The fast path for SW providers. */
953 	if (CHECK_FASTPATH(cr, pd)) {
954 		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
955 		KCF_PROV_INCRSTATS(pd, error);
956 	} else {
957 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
958 		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);
959 
960 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
961 	}
962 out:
963 	return (error);
964 }
965 
966 /*
967  * Terminates a multi-part dual encrypt/mac operation.
968  */
969 /* ARGSUSED */
970 int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
971     crypto_data_t *mac, crypto_call_req_t *cr)
972 {
973 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
974 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
975 	kcf_provider_desc_t *pd;
976 	int error;
977 	kcf_req_params_t params;
978 
979 	if ((ctx == NULL) ||
980 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
981 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
982 		return (CRYPTO_INVALID_CONTEXT);
983 	}
984 
985 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
986 
987 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
988 		off_t save_offset;
989 		size_t save_len;
990 		crypto_context_t mac_context;
991 		crypto_call_flag_t save_flag;
992 
993 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
994 			return (CRYPTO_INVALID_CONTEXT);
995 		}
996 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
997 		mac_context = (crypto_context_t)mac_ctx;
998 
999 		if (cr == NULL) {
1000 			/* Get the last chunk of ciphertext */
1001 			error = crypto_encrypt_final(context,
1002 			    (crypto_data_t *)ct, NULL);
1003 
1004 			if (error != CRYPTO_SUCCESS)  {
1005 				/*
1006 				 * Needed here, because the caller of
1007 				 * crypto_encrypt_mac_final() lost all
1008 				 * refs to the mac_ctx.
1009 				 */
1010 				crypto_cancel_ctx(mac_context);
1011 				return (error);
1012 			}
1013 			if (ct->dd_len2 > 0) {
1014 				save_offset = ct->dd_offset1;
1015 				save_len = ct->dd_len1;
1016 				ct->dd_offset1 = ct->dd_offset2;
1017 				ct->dd_len1 = ct->dd_len2;
1018 
1019 				error = crypto_mac_update(mac_context,
1020 				    (crypto_data_t *)ct, NULL);
1021 
1022 				ct->dd_offset1 = save_offset;
1023 				ct->dd_len1 = save_len;
1024 
1025 				if (error != CRYPTO_SUCCESS)  {
1026 					crypto_cancel_ctx(mac_context);
1027 					return (error);
1028 				}
1029 			}
1030 
1031 			/* and finally, collect the MAC */
1032 			error = crypto_mac_final(mac_context, mac, NULL);
1033 
1034 			return (error);
1035 		}
1036 		/* submit a pure asynchronous request. */
1037 		save_flag = cr->cr_flag;
1038 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1039 
1040 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1041 		    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL)
1042 
1043 
1044 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1045 
1046 		cr->cr_flag = save_flag;
1047 		return (error);
1048 	}
1049 	/* The fast path for SW providers. */
1050 	if (CHECK_FASTPATH(cr, pd)) {
1051 		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
1052 		KCF_PROV_INCRSTATS(pd, error);
1053 	} else {
1054 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1055 		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);
1056 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1057 	}
1058 out:
1059 	/* Release the hold done in kcf_new_ctx() during init step. */
1060 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
1061 	return (error);
1062 }
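
/*
 * Illustrative sketch, not part of kcf_dual.c: the synchronous multi-part
 * flow built from crypto_encrypt_mac_init(), _update() and _final() above.
 * Chunking of the plaintext and sizing of the dual-data output are the
 * consumer's responsibility and are assumed here; error cleanup of the
 * context is elided for brevity.
 */
static int
example_encrypt_mac_multipart(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_data_t *pt_chunks, int nchunks,
    crypto_dual_data_t *ct, crypto_data_t *mac)
{
	crypto_context_t ctx;
	int i, rv;

	rv = crypto_encrypt_mac_init(encr_mech, mac_mech, encr_key, mac_key,
	    NULL, NULL, &ctx, NULL);
	if (rv != CRYPTO_SUCCESS)
		return (rv);

	for (i = 0; i < nchunks; i++) {
		rv = crypto_encrypt_mac_update(ctx, &pt_chunks[i], ct, NULL);
		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/* Emits the last ciphertext portion and collects the MAC. */
	return (crypto_encrypt_mac_final(ctx, ct, mac, NULL));
}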
1063 
1064 /*
1065  * Performs an atomic dual mac/decrypt operation. The provider to use
1066  * is determined by the KCF dispatcher.
1067  */
1068 int
1069 crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
1070     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1071     crypto_key_t *mac_key, crypto_key_t *decr_key,
1072     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1073     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1074 {
1075 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1076 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
1077 }
1078 
1079 int
1080 crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
1081     crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
1082     crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
1083     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1084     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1085 {
1086 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1087 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1088 	    crq, B_FALSE));
1089 }
1090 
1091 /*
1092  * Performs an atomic dual mac/decrypt operation. The provider to use
1093  * is determined by the KCF dispatcher. 'mac' specifies the expected
1094  * value for the MAC. The decryption is not performed if the computed
1095  * MAC does not match the expected MAC.
1096  */
1097 int
1098 crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
1099     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1100     crypto_key_t *mac_key, crypto_key_t *decr_key,
1101     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1102     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1103 {
1104 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1105 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
1106 }
1107 
1108 int
1109 crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
1110     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1111     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1112     crypto_key_t *mac_key, crypto_key_t *decr_key,
1113     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1114     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1115 {
1116 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1117 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1118 	    crq, B_TRUE));
1119 }
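
/*
 * Illustrative sketch, not part of kcf_dual.c: verifying a received MAC
 * before decrypting, through the dispatcher-selected entry point above.
 * Per the comment on crypto_mac_verify_decrypt(), decryption is not
 * performed when the computed MAC does not match 'expected_mac'.  The
 * function name is hypothetical and templates are omitted (NULL).
 */
static int
example_verify_then_decrypt(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_data_t *expected_mac, crypto_data_t *pt)
{
	/* Synchronous call; a mismatching MAC fails before any decryption. */
	return (crypto_mac_verify_decrypt(mac_mech, decr_mech, ct, mac_key,
	    decr_key, NULL, NULL, expected_mac, pt, NULL));
}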
1120 
1121 /*
1122  * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
1123  * The MAC is optionally verified (do_verify) before the decryption step.
1124  */
1125 static int
1126 crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
1127     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1128     crypto_key_t *mac_key, crypto_key_t *decr_key,
1129     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1130     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1131     boolean_t do_verify)
1132 {
1133 	/*
1134 	 * First try to find a provider for the decryption mechanism that
1135 	 * is also capable of the MAC mechanism.
1136 	 * We still favor optimizing the costlier decryption.
1137 	 */
1138 	int error;
1139 	kcf_mech_entry_t *me;
1140 	kcf_provider_desc_t *pd;
1141 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1142 	kcf_req_params_t params;
1143 	kcf_mac_decrypt_ops_params_t *cmops;
1144 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1145 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1146 	kcf_prov_tried_t *list = NULL;
1147 	boolean_t decr_tmpl_checked = B_FALSE;
1148 	boolean_t mac_tmpl_checked = B_FALSE;
1149 	kcf_dual_req_t *next_req = NULL;
1150 	crypto_call_req_t mac_req, *mac_reqp = NULL;
1151 
1152 retry:
1153 	/* pd is returned held on success */
1154 	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
1155 	    &me, &prov_decr_mechid,
1156 	    &prov_mac_mechid, &error, list,
1157 	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1158 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC, ct->dd_len2);
1159 	if (pd == NULL) {
1160 		if (list != NULL)
1161 			kcf_free_triedlist(list);
1162 		if (next_req != NULL)
1163 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1164 		return (CRYPTO_MECH_NOT_SUPPORTED);
1165 	}
1166 
1167 	/*
1168 	 * For SW providers, check the validity of the context template.
1169 	 * It is very rare that the generation number mismatches, so it
1170 	 * is acceptable to fail here and let the consumer recover by
1171 	 * freeing this tmpl and creating a new one for the key and the
1172 	 * new SW provider.
1173 	 */
1174 
1175 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1176 		if (decr_tmpl != NULL) {
1177 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1178 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1179 				if (next_req != NULL)
1180 					kmem_free(next_req,
1181 					    sizeof (kcf_dual_req_t));
1182 				if (list != NULL)
1183 					kcf_free_triedlist(list);
1184 				KCF_PROV_REFRELE(pd);
1185 
1186 				/* Which one is the old one? */
1187 				return (CRYPTO_OLD_CTX_TEMPLATE);
1188 			}
1189 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1190 		}
1191 		decr_tmpl_checked = B_TRUE;
1192 	}
1193 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1194 		/* Need to emulate with 2 internal calls */
1195 
1196 		/* Prepare the call_req to be submitted for the MAC step */
1197 
1198 		if (crq != NULL) {
1199 
1200 			if (next_req == NULL) {
1201 				/*
1202 				 * allocate, initialize and prepare the
1203 				 * params for the next step only in the
1204 				 * first pass (not on every retry).
1205 				 */
1206 				next_req = kcf_alloc_req(crq);
1207 
1208 				if (next_req == NULL) {
1209 					KCF_PROV_REFRELE(pd);
1210 					if (list != NULL)
1211 						kcf_free_triedlist(list);
1212 					return (CRYPTO_HOST_MEMORY);
1213 				}
1214 				KCF_WRAP_DECRYPT_OPS_PARAMS(
1215 				    &(next_req->kr_params), KCF_OP_ATOMIC,
1216 				    NULL, decr_mech, decr_key,
1217 				    (crypto_data_t *)ct, pt, spi_decr_tmpl);
1218 			}
1219 
1220 			mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
1221 			mac_req.cr_flag |= CRYPTO_SETDUAL;
1222 			mac_req.cr_callback_func = kcf_next_req;
1223 			mac_req.cr_callback_arg = next_req;
1224 			mac_reqp = &mac_req;
1225 		}
1226 
1227 		/* 'pd' is the decryption provider. */
1228 
1229 		if (do_verify)
1230 			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
1231 			    mac_key, mac_tmpl, mac,
1232 			    (crq == NULL) ? NULL : mac_reqp);
1233 		else
1234 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
1235 			    mac_key, mac_tmpl, mac,
1236 			    (crq == NULL) ? NULL : mac_reqp);
1237 
1238 		switch (error) {
1239 		case CRYPTO_SUCCESS: {
1240 			off_t saveoffset;
1241 			size_t savelen;
1242 
1243 			if (next_req == NULL) {
1244 				saveoffset = ct->dd_offset1;
1245 				savelen = ct->dd_len1;
1246 			} else {
1247 				saveoffset = next_req->kr_saveoffset =
1248 				    ct->dd_offset1;
1249 				savelen = next_req->kr_savelen = ct->dd_len1;
1250 
1251 				ASSERT(mac_reqp != NULL);
1252 				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
1253 				mac_req.cr_callback_func = kcf_last_req;
1254 			}
1255 			ct->dd_offset1 = ct->dd_offset2;
1256 			ct->dd_len1 = ct->dd_len2;
1257 
1258 			if (CHECK_FASTPATH(crq, pd)) {
1259 				crypto_mechanism_t lmech;
1260 
1261 				lmech = *decr_mech;
1262 				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
1263 				    pd, &lmech);
1264 
1265 				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
1266 				    &lmech, decr_key, (crypto_data_t *)ct,
1267 				    (crypto_data_t *)pt, spi_decr_tmpl,
1268 				    KCF_SWFP_RHNDL(mac_reqp));
1269 
1270 				KCF_PROV_INCRSTATS(pd, error);
1271 			} else {
1272 				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
1273 				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
1274 				    decr_key, (crypto_data_t *)ct, pt,
1275 				    spi_decr_tmpl);
1276 
1277 				error = kcf_submit_request(pd, NULL,
1278 				    (crq == NULL) ? NULL : mac_reqp,
1279 				    &params, B_FALSE);
1280 			}
1281 			if (error != CRYPTO_QUEUED) {
1282 				KCF_PROV_INCRSTATS(pd, error);
1283 				ct->dd_offset1 = saveoffset;
1284 				ct->dd_len1 = savelen;
1285 			}
1286 			break;
1287 		}
1288 
1289 		case CRYPTO_QUEUED:
1290 			if ((crq != NULL) && (crq->cr_flag & CRYPTO_SKIP_REQID))
1291 				crq->cr_reqid = mac_req.cr_reqid;
1292 			break;
1293 
1294 		default:
1295 			if (IS_RECOVERABLE(error)) {
1296 				if (kcf_insert_triedlist(&list, pd,
1297 				    KCF_KMFLAG(crq)) != NULL)
1298 					goto retry;
1299 			}
1300 		}
1301 		if (error != CRYPTO_QUEUED && next_req != NULL)
1302 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1303 		if (list != NULL)
1304 			kcf_free_triedlist(list);
1305 		KCF_PROV_REFRELE(pd);
1306 		return (error);
1307 	}
1308 
1309 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1310 		if ((mac_tmpl != NULL) &&
1311 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1312 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1313 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1314 				if (next_req != NULL)
1315 					kmem_free(next_req,
1316 					    sizeof (kcf_dual_req_t));
1317 				if (list != NULL)
1318 					kcf_free_triedlist(list);
1319 				KCF_PROV_REFRELE(pd);
1320 
1321 				/* Which one is the old one? */
1322 				return (CRYPTO_OLD_CTX_TEMPLATE);
1323 			}
1324 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1325 		}
1326 		mac_tmpl_checked = B_TRUE;
1327 	}
1328 
1329 	/* The fast path for SW providers. */
1330 	if (CHECK_FASTPATH(crq, pd)) {
1331 		crypto_mechanism_t lmac_mech;
1332 		crypto_mechanism_t ldecr_mech;
1333 
1334 		/* careful! struct assignments */
1335 		ldecr_mech = *decr_mech;
1336 		ldecr_mech.cm_type = prov_decr_mechid;
1337 		lmac_mech = *mac_mech;
1338 		lmac_mech.cm_type = prov_mac_mechid;
1339 
1340 		if (do_verify)
1341 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
1342 			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
1343 			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1344 			    KCF_SWFP_RHNDL(crq));
1345 		else
1346 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
1347 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1348 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1349 			    KCF_SWFP_RHNDL(crq));
1350 
1351 		KCF_PROV_INCRSTATS(pd, error);
1352 	} else {
1353 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1354 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1355 		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
1356 		    spi_mac_tmpl, spi_decr_tmpl);
1357 
1358 		cmops = &(params.rp_u.mac_decrypt_params);
1359 
1360 		/* careful! struct assignments */
1361 		cmops->md_decr_mech = *decr_mech;
1362 		cmops->md_decr_mech.cm_type = prov_decr_mechid;
1363 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1364 		cmops->md_mac_mech = *mac_mech;
1365 		cmops->md_mac_mech.cm_type = prov_mac_mechid;
1366 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1367 
1368 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
1369 	}
1370 
1371 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
1372 	    IS_RECOVERABLE(error)) {
1373 		/* Add pd to the linked list of providers tried. */
1374 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
1375 			goto retry;
1376 	}
1377 
1378 	if (list != NULL)
1379 		kcf_free_triedlist(list);
1380 
1381 	if (next_req != NULL)
1382 		kmem_free(next_req, sizeof (kcf_dual_req_t));
1383 	KCF_PROV_REFRELE(pd);
1384 	return (error);
1385 }
1386 
1387 static int
1388 crypto_mac_decrypt_common_prov(crypto_provider_t provider,
1389     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1390     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1391     crypto_key_t *mac_key, crypto_key_t *decr_key,
1392     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1393     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1394     boolean_t do_verify)
1395 {
1396 	/*
1397 	 * First try to find a provider for the decryption mechanism that
1398 	 * is also capable of the MAC mechanism.
1399 	 * We still favor optimizing the costlier decryption.
1400 	 */
1401 	int error;
1402 	kcf_mech_entry_t *me;
1403 	kcf_provider_desc_t *pd = provider;
1404 	kcf_provider_desc_t *real_provider = pd;
1405 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1406 	kcf_req_params_t params;
1407 	kcf_mac_decrypt_ops_params_t *cmops;
1408 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1409 
1410 	ASSERT(KCF_PROV_REFHELD(pd));
1411 
1412 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1413 		error = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
1414 		    mac_mech->cm_type, mac_key, pd, &real_provider,
1415 		    CRYPTO_FG_MAC_DECRYPT_ATOMIC);
1416 
1417 		if (error != CRYPTO_SUCCESS)
1418 			return (error);
1419 	}
1420 
1421 	/*
1422 	 * For SW providers, check the validity of the context template.
1423 	 * It is very rare that the generation number mismatches, so it
1424 	 * is acceptable to fail here and let the consumer recover by
1425 	 * freeing this tmpl and creating a new one for the key and the
1426 	 * new SW provider.
1427 	 */
1428 
1429 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1430 		if (decr_tmpl != NULL) {
1431 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1432 			    KCF_SUCCESS) {
1433 				error = CRYPTO_MECHANISM_INVALID;
1434 				goto out;
1435 			}
1436 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1437 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1438 				error = CRYPTO_OLD_CTX_TEMPLATE;
1439 				goto out;
1440 			}
1441 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1442 		}
1443 
1444 		if (mac_tmpl != NULL) {
1445 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1446 			    KCF_SUCCESS) {
1447 				error = CRYPTO_MECHANISM_INVALID;
1448 				goto out;
1449 			}
1450 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1451 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1452 				error = CRYPTO_OLD_CTX_TEMPLATE;
1453 				goto out;
1454 			}
1455 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1456 		}
1457 	}
1458 
1459 	/* The fast path for SW providers. */
1460 	if (CHECK_FASTPATH(crq, pd)) {
1461 		crypto_mechanism_t lmac_mech;
1462 		crypto_mechanism_t ldecr_mech;
1463 
1464 		/* careful! struct assignments */
1465 		ldecr_mech = *decr_mech;
1466 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1467 		    &ldecr_mech);
1468 
1469 		lmac_mech = *mac_mech;
1470 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1471 		    &lmac_mech);
1472 
1473 		if (do_verify)
1474 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
1475 			    real_provider, sid, &lmac_mech, mac_key,
1476 			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
1477 			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
1478 		else
1479 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
1480 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1481 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1482 			    KCF_SWFP_RHNDL(crq));
1483 
1484 		KCF_PROV_INCRSTATS(pd, error);
1485 	} else {
1486 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1487 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1488 		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
1489 		    spi_mac_tmpl, spi_decr_tmpl);
1490 
1491 		cmops = &(params.rp_u.mac_decrypt_params);
1492 
1493 		/* careful! struct assignments */
1494 		cmops->md_decr_mech = *decr_mech;
1495 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1496 		    &cmops->md_decr_mech);
1497 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1498 
1499 		cmops->md_mac_mech = *mac_mech;
1500 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1501 		    &cmops->md_mac_mech);
1502 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1503 
1504 		error = kcf_submit_request(real_provider, NULL, crq, &params,
1505 		    B_FALSE);
1506 	}
1507 
1508 out:
1509 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1510 		KCF_PROV_REFRELE(real_provider);
1511 	return (error);
1512 }
1513 
1514 /*
1515  * Starts a multi-part dual mac/decrypt operation. The provider to
1516  * use is determined by the KCF dispatcher.
1517  */
1518 /* ARGSUSED */
1519 int
1520 crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
1521     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1522     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1523     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1524     crypto_call_req_t *cr)
1525 {
1526 	/*
1527 	 * First try to find a provider for the decryption mechanism that
1528 	 * is also capable of the MAC mechanism.
1529 	 * We still favor optimizing the costlier decryption.
1530 	 */
1531 	int error;
1532 	kcf_mech_entry_t *me;
1533 	kcf_provider_desc_t *pd;
1534 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1535 	kcf_req_params_t params;
1536 	kcf_mac_decrypt_ops_params_t *mdops;
1537 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1538 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1539 	kcf_prov_tried_t *list = NULL;
1540 	boolean_t decr_tmpl_checked = B_FALSE;
1541 	boolean_t mac_tmpl_checked = B_FALSE;
1542 	crypto_ctx_t *ctx = NULL;
1543 	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
1544 	crypto_call_flag_t save_flag;
1545 
1546 retry:
1547 	/* pd is returned held on success */
1548 	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
1549 	    &me, &prov_decr_mechid,
1550 	    &prov_mac_mechid, &error, list,
1551 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC, 0);
1552 	if (pd == NULL) {
1553 		if (list != NULL)
1554 			kcf_free_triedlist(list);
1555 		return (error);
1556 	}
1557 
1558 	/*
1559 	 * For SW providers, check the validity of the context template.
1560 	 * It is very rare that the generation number mismatches, so it
1561 	 * is acceptable to fail here and let the consumer recover by
1562 	 * freeing this tmpl and creating a new one for the key and the
1563 	 * new SW provider.
1564 	 * Warning! This will need to change when multiple software
1565 	 * providers per mechanism are supported.
1566 	 */
1567 
1568 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1569 		if (decr_tmpl != NULL) {
1570 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1571 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1572 
1573 				if (list != NULL)
1574 					kcf_free_triedlist(list);
1575 				if (decr_kcf_context != NULL)
1576 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1577 
1578 				KCF_PROV_REFRELE(pd);
1579 				/* Which one is the old one? */
1580 				return (CRYPTO_OLD_CTX_TEMPLATE);
1581 			}
1582 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1583 		}
1584 		decr_tmpl_checked = B_TRUE;
1585 	}
1586 
1587 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1588 		/* Need to emulate with 2 internal calls */
1589 
1590 		/*
1591 		 * We avoid code complexity by limiting the pure asynchronous
1592 		 * case to SW providers only.
1593 		 * XXX - Redo the emulation code below so that we can
1594 		 * remove this limitation.
1595 		 */
1596 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
1597 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
1598 			    != NULL))
1599 				goto retry;
1600 			if (list != NULL)
1601 				kcf_free_triedlist(list);
1602 			if (decr_kcf_context != NULL)
1603 				KCF_CONTEXT_REFRELE(decr_kcf_context);
1604 			KCF_PROV_REFRELE(pd);
1605 			return (CRYPTO_HOST_MEMORY);
1606 		}
1607 
1608 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1609 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1610 			if (ctx == NULL) {
1611 				if (list != NULL)
1612 					kcf_free_triedlist(list);
1613 				if (decr_kcf_context != NULL)
1614 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1615 				KCF_PROV_REFRELE(pd);
1616 				return (CRYPTO_HOST_MEMORY);
1617 			}
1618 			decr_kcf_context = (kcf_context_t *)
1619 			    ctx->cc_framework_private;
1620 		}
1621 		/*
1622 		 * Trade-off speed vs avoidance of code complexity and
1623 		 * duplication:
1624 		 * Could do all the combinations of fastpath / synch / asynch
1625 		 * for the decryption and the mac steps. Early attempts
1626 		 * showed the code grew wild and bug-prone, for little gain.
1627 		 * Therefore, the adaptive asynch case is not implemented.
1628 		 * It's either pure synchronous, or pure asynchronous.
1629 		 * We still preserve a fastpath for the pure synchronous
1630 		 * requests to SW providers.
1631 		 */
1632 		if (cr == NULL) {
1633 			crypto_context_t mac_context;
1634 
1635 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
1636 			    &mac_context, NULL);
1637 
1638 			if (error != CRYPTO_SUCCESS) {
1639 				/* Can't be CRYPTO_QUEUED. return the failure */
1640 				if (list != NULL)
1641 					kcf_free_triedlist(list);
1642 				KCF_PROV_REFRELE(pd);
1643 				if (decr_kcf_context != NULL)
1644 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1645 				return (error);
1646 			}
1647 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1648 				crypto_mechanism_t lmech = *decr_mech;
1649 
1650 				lmech.cm_type = prov_decr_mechid;
1651 
1652 				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
1653 				    decr_key, spi_decr_tmpl,
1654 				    KCF_RHNDL(KM_SLEEP));
1655 			} else {
1656 				/*
1657 				 * If we did the 'goto retry' then ctx may not
1658 				 * be NULL.  In general, we can't reuse another
1659 				 * provider's context, so we free it now so
1660 				 * we don't leak it.
1661 				 */
1662 				if (ctx != NULL) {
1663 					KCF_CONTEXT_REFRELE((kcf_context_t *)
1664 					    ctx->cc_framework_private);
1665 					decr_kcf_context = NULL;
1666 				}
1667 				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
1668 				    decr_mech, decr_key, &decr_tmpl,
1669 				    (crypto_context_t *)&ctx, NULL);
1670 
1671 				if (error == CRYPTO_SUCCESS) {
1672 					decr_kcf_context = (kcf_context_t *)
1673 					    ctx->cc_framework_private;
1674 				}
1675 			}
1676 
1677 			KCF_PROV_INCRSTATS(pd, error);
1678 
1679 			KCF_PROV_REFRELE(pd);
1680 
1681 			if (error != CRYPTO_SUCCESS) {
1682 				/* Can't be CRYPTO_QUEUED. return the failure */
1683 				if (list != NULL)
1684 					kcf_free_triedlist(list);
1685 				if (mac_kcf_context != NULL)
1686 					KCF_CONTEXT_REFRELE(mac_kcf_context);
1687 
1688 				return (error);
1689 			}
1690 			mac_kcf_context = (kcf_context_t *)
1691 			    ((crypto_ctx_t *)mac_context)->
1692 			    cc_framework_private;
1693 
1694 			decr_kcf_context = (kcf_context_t *)
1695 			    ctx->cc_framework_private;
1696 
1697 			/*
1698 			 * Here also, the mac context is second. The callback
1699 			 * case can't overwrite the context returned to
1700 			 * the caller.
1701 			 */
1702 			decr_kcf_context->kc_secondctx = mac_kcf_context;
1703 			KCF_CONTEXT_REFHOLD(mac_kcf_context);
1704 
1705 			*ctxp = (crypto_context_t)ctx;
1706 
1707 			return (error);
1708 		}
1709 		/* submit a pure asynchronous request. */
1710 		save_flag = cr->cr_flag;
1711 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1712 
1713 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1714 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1715 		    spi_mac_tmpl, spi_decr_tmpl);
1716 
1717 		mdops = &(params.rp_u.mac_decrypt_params);
1718 
1719 		/* careful! struct assignments */
1720 		mdops->md_decr_mech = *decr_mech;
1721 		/*
1722 		 * mdops->md_decr_mech.cm_type will be set when we get to
1723 		 * kcf_emulate_dual() routine.
1724 		 */
1725 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1726 		mdops->md_mac_mech = *mac_mech;
1727 
1728 		/*
1729 		 * mdops->md_mac_mech.cm_type will be set when we know the
1730 		 * MAC provider.
1731 		 */
1732 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1733 
1734 		/*
1735 		 * non-NULL ctx->kc_secondctx tells common_submit_request
1736 		 * that this request uses separate cipher and MAC contexts.
1737 		 * That function will set the MAC context's kc_secondctx to
1738 		 * this decrypt context.
1739 		 */
1740 		decr_kcf_context->kc_secondctx = decr_kcf_context;
1741 
1742 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1743 
1744 		cr->cr_flag = save_flag;
1745 
1746 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1747 			KCF_CONTEXT_REFRELE(decr_kcf_context);
1748 		}
1749 		if (list != NULL)
1750 			kcf_free_triedlist(list);
1751 		*ctxp = (crypto_context_t)ctx;
1752 		KCF_PROV_REFRELE(pd);
1753 		return (error);
1754 	}
1755 
1756 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1757 		if ((mac_tmpl != NULL) &&
1758 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1759 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1760 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1761 
1762 				if (list != NULL)
1763 					kcf_free_triedlist(list);
1764 
1765 				KCF_PROV_REFRELE(pd);
1766 				/* Which one is the old one? */
1767 				return (CRYPTO_OLD_CTX_TEMPLATE);
1768 			}
1769 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1770 		}
1771 		mac_tmpl_checked = B_TRUE;
1772 	}
1773 
1774 	if (ctx == NULL) {
1775 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1776 		if (ctx == NULL) {
1777 			if (list != NULL)
1778 				kcf_free_triedlist(list);
1779 			KCF_PROV_REFRELE(pd);
1780 			return (CRYPTO_HOST_MEMORY);
1781 		}
1782 		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1783 	}
1784 
1785 	/* The fast path for SW providers. */
1786 	if (CHECK_FASTPATH(cr, pd)) {
1787 		crypto_mechanism_t ldecr_mech;
1788 		crypto_mechanism_t lmac_mech;
1789 
1790 		/* careful! struct assignments */
1791 		ldecr_mech = *decr_mech;
1792 		ldecr_mech.cm_type = prov_decr_mechid;
1793 		lmac_mech = *mac_mech;
1794 		lmac_mech.cm_type = prov_mac_mechid;
1795 
1796 		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
1797 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1798 		    KCF_SWFP_RHNDL(cr));
1799 
1800 		KCF_PROV_INCRSTATS(pd, error);
1801 	} else {
1802 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1803 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1804 		    spi_mac_tmpl, spi_decr_tmpl);
1805 
1806 		mdops = &(params.rp_u.mac_decrypt_params);
1807 
1808 		/* careful! struct assignments */
1809 		mdops->md_decr_mech = *decr_mech;
1810 		mdops->md_decr_mech.cm_type = prov_decr_mechid;
1811 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1812 		mdops->md_mac_mech = *mac_mech;
1813 		mdops->md_mac_mech.cm_type = prov_mac_mechid;
1814 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1815 
1816 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1817 	}
1818 
1819 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1820 		if ((IS_RECOVERABLE(error)) &&
1821 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
1822 			goto retry;
1823 
1824 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1825 	} else
1826 		*ctxp = (crypto_context_t)ctx;
1827 
1828 	if (list != NULL)
1829 		kcf_free_triedlist(list);
1830 
1831 	KCF_PROV_REFRELE(pd);
1832 	return (error);
1833 }
1834 
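/*
 * Illustrative consumer sketch (not an entry point of this file):
 * recovering from the CRYPTO_OLD_CTX_TEMPLATE failure returned above by
 * discarding the stale cached templates and rebuilding them before
 * retrying the dual init.  It assumes the standard template helpers
 * crypto_create_ctx_template()/crypto_destroy_ctx_template(), and it
 * uses the synchronous path by passing a NULL crypto_call_req_t.
 */
static int
example_mac_decrypt_init_with_recovery(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_ctx_template_t *mac_tmpl,
    crypto_ctx_template_t *decr_tmpl, crypto_context_t *ctxp)
{
	int rv;

	rv = crypto_mac_decrypt_init(mac_mech, decr_mech, mac_key, decr_key,
	    *mac_tmpl, *decr_tmpl, ctxp, NULL);

	if (rv == CRYPTO_OLD_CTX_TEMPLATE) {
		/* The cached templates belong to an older SW provider. */
		if (*mac_tmpl != NULL)
			crypto_destroy_ctx_template(*mac_tmpl);
		if (*decr_tmpl != NULL)
			crypto_destroy_ctx_template(*decr_tmpl);

		/* Rebuild the templates for the current provider. */
		(void) crypto_create_ctx_template(mac_mech, mac_key,
		    mac_tmpl, KM_SLEEP);
		(void) crypto_create_ctx_template(decr_mech, decr_key,
		    decr_tmpl, KM_SLEEP);

		rv = crypto_mac_decrypt_init(mac_mech, decr_mech, mac_key,
		    decr_key, *mac_tmpl, *decr_tmpl, ctxp, NULL);
	}
	return (rv);
}
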
1835 int
1836 crypto_mac_decrypt_init_prov(crypto_provider_t provider,
1837     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1838     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1839     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1840     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1841     crypto_call_req_t *cr)
1842 {
1843 	/*
1844 	 * First try to find a provider for the decryption mechanism that
1845 	 * is also capable of the MAC mechanism.
1846 	 * We still favor optimizing the costlier decryption.
1847 	 */
1848 	int rv;
1849 	kcf_mech_entry_t *me;
1850 	kcf_provider_desc_t *pd = provider;
1851 	kcf_provider_desc_t *real_provider = pd;
1852 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1853 	kcf_req_params_t params;
1854 	kcf_mac_decrypt_ops_params_t *mdops;
1855 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1856 	crypto_ctx_t *ctx;
1857 	kcf_context_t *decr_kcf_context = NULL;
1858 
1859 	ASSERT(KCF_PROV_REFHELD(pd));
1860 
1861 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1862 		rv = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
1863 		    mac_mech->cm_type, mac_key, pd, &real_provider,
1864 		    CRYPTO_FG_MAC_DECRYPT);
1865 
1866 		if (rv != CRYPTO_SUCCESS)
1867 			return (rv);
1868 	}
1869 
1870 	/*
1871 	 * For SW providers, check the validity of the context template.
1872 	 * It is very rare that the generation number mismatches, so it
1873 	 * is acceptable to fail here and let the consumer recover by
1874 	 * freeing this tmpl and creating a new one for the key and the
1875 	 * new SW provider.
1876 	 * Warning! This will need to change when multiple software
1877 	 * providers per mechanism are supported.
1878 	 */
1879 
1880 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1881 		if (decr_tmpl != NULL) {
1882 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1883 			    KCF_SUCCESS) {
1884 				rv = CRYPTO_MECHANISM_INVALID;
1885 				goto out;
1886 			}
1887 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1888 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1889 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1890 				goto out;
1891 			}
1892 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1893 		}
1894 
1895 		if (mac_tmpl != NULL) {
1896 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1897 			    KCF_SUCCESS) {
1898 				rv = CRYPTO_MECHANISM_INVALID;
1899 				goto out;
1900 			}
1901 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1902 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1903 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1904 				goto out;
1905 			}
1906 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1907 		}
1908 	}
1909 
1910 	ctx = kcf_new_ctx(cr, real_provider, sid);
1911 	if (ctx == NULL) {
1912 		rv = CRYPTO_HOST_MEMORY;
1913 		goto out;
1914 	}
1915 	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1916 
1917 	/* The fast path for SW providers. */
1918 	if (CHECK_FASTPATH(cr, pd)) {
1919 		crypto_mechanism_t ldecr_mech;
1920 		crypto_mechanism_t lmac_mech;
1921 
1922 		/* careful! struct assignments */
1923 		ldecr_mech = *decr_mech;
1924 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1925 		    &ldecr_mech);
1926 
1927 		lmac_mech = *mac_mech;
1928 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1929 		    &lmac_mech);
1930 
1931 		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
1932 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1933 		    KCF_SWFP_RHNDL(cr));
1934 
1935 		KCF_PROV_INCRSTATS(pd, rv);
1936 	} else {
1937 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1938 		    sid, mac_key, decr_key, NULL, NULL, NULL,
1939 		    spi_mac_tmpl, spi_decr_tmpl);
1940 
1941 		mdops = &(params.rp_u.mac_decrypt_params);
1942 
1943 		/* careful! struct assignments */
1944 		mdops->md_decr_mech = *decr_mech;
1945 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1946 		    &mdops->md_decr_mech);
1947 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1948 
1949 		mdops->md_mac_mech = *mac_mech;
1950 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1951 		    &mdops->md_mac_mech);
1952 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1953 
1954 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
1955 		    B_FALSE);
1956 	}
1957 
1958 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
1959 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1960 	} else
1961 		*ctxp = (crypto_context_t)ctx;
1962 
1963 out:
1964 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1965 		KCF_PROV_REFRELE(real_provider);
1966 	return (rv);
1967 }
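
/*
 * Illustrative consumer sketch (not an entry point of this file):
 * invoking the provider-specified variant directly.  The provider
 * handle and session id are assumed to have been obtained, and the
 * provider reference held, through the usual KCF provider management
 * interfaces; NULL templates and a NULL call request keep the call
 * synchronous, so CRYPTO_QUEUED is not expected back.
 */
static int
example_mac_decrypt_init_on_provider(crypto_provider_t prov,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_context_t *ctxp)
{
	return (crypto_mac_decrypt_init_prov(prov, sid, mac_mech, decr_mech,
	    mac_key, decr_key, NULL, NULL, ctxp, NULL));
}
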
1968 /*
1969  * Continues a multi-part dual mac/decrypt operation.
1970  */
1971 /* ARGSUSED */
1972 int
1973 crypto_mac_decrypt_update(crypto_context_t context,
1974     crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
1975 {
1976 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
1977 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
1978 	kcf_provider_desc_t *pd;
1979 	int error;
1980 	kcf_req_params_t params;
1981 
1982 	if ((ctx == NULL) ||
1983 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
1984 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
1985 		return (CRYPTO_INVALID_CONTEXT);
1986 	}
1987 
1988 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
1989 
1990 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
1991 		off_t save_offset;
1992 		size_t save_len;
1993 		crypto_call_flag_t save_flag;
1994 
1995 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
1996 			error = CRYPTO_INVALID_CONTEXT;
1997 			goto out;
1998 		}
1999 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2000 
2001 		/* First we submit the MAC request */
2002 		if (cr == NULL) {
2003 			/*
2004 			 * 'ct' is never NULL.
2005 			 */
2006 			error = crypto_mac_update((crypto_context_t)mac_ctx,
2007 			    (crypto_data_t *)ct, NULL);
2008 
2009 			if (error != CRYPTO_SUCCESS)
2010 				goto out;
2011 
2012 			/* Decrypt a different length only when told so */
2013 
2014 			save_offset = ct->dd_offset1;
2015 			save_len = ct->dd_len1;
2016 
2017 			if (ct->dd_len2 > 0) {
2018 				ct->dd_offset1 = ct->dd_offset2;
2019 				ct->dd_len1 = ct->dd_len2;
2020 			}
2021 
2022 			error = crypto_decrypt_update(context,
2023 			    (crypto_data_t *)ct, pt, NULL);
2024 
2025 			ct->dd_offset1 = save_offset;
2026 			ct->dd_len1 = save_len;
2027 
2028 			goto out;
2029 		}
2030 		/* submit a pure asynchronous request. */
2031 		save_flag = cr->cr_flag;
2032 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2033 
2034 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2035 		    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL);
2036 
2037 
2038 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2039 
2040 		cr->cr_flag = save_flag;
2041 		goto out;
2042 	}
2043 
2044 	/* The fast path for SW providers. */
2045 	if (CHECK_FASTPATH(cr, pd)) {
2046 		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
2047 		KCF_PROV_INCRSTATS(pd, error);
2048 	} else {
2049 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2050 		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);
2051 
2052 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2053 	}
2054 out:
2055 	return (error);
2056 }
2057 
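/*
 * Illustrative consumer sketch (not an entry point of this file):
 * filling in a crypto_dual_data_t so that the MAC pass covers a whole
 * record (header + payload) while the decrypt pass, per the
 * dd_offset2/dd_len2 swap in crypto_mac_decrypt_update() above, only
 * touches the payload.  The dd_* names are assumed to be the
 * crypto_data_t aliases from the framework headers; the record layout
 * and sizes are placeholders.
 */
static void
example_fill_dual_data(crypto_dual_data_t *dd, char *rec, size_t reclen,
    size_t hdrlen)
{
	bzero(dd, sizeof (*dd));

	dd->dd_format = CRYPTO_DATA_RAW;
	dd->dd_raw.iov_base = rec;
	dd->dd_raw.iov_len = reclen;

	/* Primary region: the bytes the MAC is computed over. */
	dd->dd_offset1 = 0;
	dd->dd_len1 = reclen;

	/* Secondary region: the bytes that are actually decrypted. */
	dd->dd_offset2 = (off_t)hdrlen;
	dd->dd_len2 = reclen - hdrlen;
}
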
2058 /*
2059  * Terminates a multi-part dual mac/decrypt operation.
2060  */
2061 /* ARGSUSED */
2062 int
2063 crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
2064     crypto_data_t *pt, crypto_call_req_t *cr)
2065 {
2066 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
2067 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
2068 	kcf_provider_desc_t *pd;
2069 	int error;
2070 	kcf_req_params_t params;
2071 
2072 	if ((ctx == NULL) ||
2073 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
2074 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
2075 		return (CRYPTO_INVALID_CONTEXT);
2076 	}
2077 
2078 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
2079 
2080 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
2081 		crypto_call_flag_t save_flag;
2082 
2083 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2084 			error = CRYPTO_INVALID_CONTEXT;
2085 			goto out;
2086 		}
2087 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2088 
2089 		/* First we collect the MAC */
2090 		if (cr == NULL) {
2091 
2092 			error = crypto_mac_final((crypto_context_t)mac_ctx,
2093 			    mac, NULL);
2094 
2095 			if (error != CRYPTO_SUCCESS) {
2096 				crypto_cancel_ctx(ctx);
2097 			} else {
2098 				/* Get the last chunk of plaintext */
2099 				error = crypto_decrypt_final(context, pt, NULL);
2100 			}
2101 
2102 			return (error);
2103 		}
2104 		/* submit a pure asynchronous request. */
2105 		save_flag = cr->cr_flag;
2106 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2107 
2108 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2109 		    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL);
2110 
2111 
2112 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2113 
2114 		cr->cr_flag = save_flag;
2115 
2116 		return (error);
2117 	}
2118 
2119 	/* The fast path for SW providers. */
2120 	if (CHECK_FASTPATH(cr, pd)) {
2121 		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
2122 		KCF_PROV_INCRSTATS(pd, error);
2123 	} else {
2124 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2125 		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);
2126 
2127 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2128 	}
2129 out:
2130 	/* Release the hold done in kcf_new_ctx() during init step. */
2131 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
2132 	return (error);
2133 }
2134 
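/*
 * Illustrative consumer sketch (not an entry point of this file):
 * a synchronous caller of the multi-part dual mac/decrypt entry points
 * above: init, one update per ciphertext chunk, then final.  The
 * mechanism names and parameters are placeholders (the CBC IV, for
 * instance, is omitted); a real caller would supply them through
 * cm_param/cm_param_len.
 */
static int
example_mac_decrypt_record(crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_dual_data_t *ct, crypto_data_t *pt, crypto_data_t *mac)
{
	crypto_mechanism_t mac_mech, decr_mech;
	crypto_context_t ctx;
	int rv;

	mac_mech.cm_type = crypto_mech2id(SUN_CKM_SHA256_HMAC);
	mac_mech.cm_param = NULL;
	mac_mech.cm_param_len = 0;

	decr_mech.cm_type = crypto_mech2id(SUN_CKM_AES_CBC);
	decr_mech.cm_param = NULL;	/* placeholder; CBC needs an IV */
	decr_mech.cm_param_len = 0;

	/* A NULL crypto_call_req_t selects the synchronous path. */
	rv = crypto_mac_decrypt_init(&mac_mech, &decr_mech, mac_key,
	    decr_key, NULL, NULL, &ctx, NULL);
	if (rv != CRYPTO_SUCCESS)
		return (rv);

	/* One update per chunk of ciphertext; a single chunk is shown. */
	rv = crypto_mac_decrypt_update(ctx, ct, pt, NULL);
	if (rv != CRYPTO_SUCCESS) {
		crypto_cancel_ctx(ctx);
		return (rv);
	}

	/* Collect the MAC and the last chunk of plaintext. */
	return (crypto_mac_decrypt_final(ctx, mac, pt, NULL));
}
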
2135 /*
2136  * Digest/Encrypt dual operation. Project-private entry point, not part of
2137  * the k-API.
2138  */
2139 /* ARGSUSED */
2140 int
2141 crypto_digest_encrypt_update(crypto_context_t digest_ctx,
2142     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2143     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2144 {
2145 	/*
2146 	 * RFE 4688647:
2147 	 * core functions needed by ioctl interface missing from impl.h
2148 	 */
2149 	return (CRYPTO_NOT_SUPPORTED);
2150 }
2151 
2152 /*
2153  * Decrypt/Digest dual operation. Project-private entry point, not part of
2154  * the k-API.
2155  */
2156 /* ARGSUSED */
2157 int
2158 crypto_decrypt_digest_update(crypto_context_t decryptctx,
2159     crypto_context_t encrypt_ctx, crypto_data_t *ciphertext,
2160     crypto_data_t *plaintext, crypto_call_req_t *crq)
2161 {
2162 	/*
2163 	 * RFE 4688647:
2164 	 * core functions needed by ioctl interface missing from impl.h
2165 	 */
2166 	return (CRYPTO_NOT_SUPPORTED);
2167 }
2168 
2169 /*
2170  * Sign/Encrypt dual operation. Project-private entry point, not part of
2171  * the k-API.
2172  */
2173 /* ARGSUSED */
2174 int
2175 crypto_sign_encrypt_update(crypto_context_t sign_ctx,
2176     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2177     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2178 {
2179 	/*
2180 	 * RFE 4688647:
2181 	 * core functions needed by ioctl interface missing from impl.h
2182 	 */
2183 	return (CRYPTO_NOT_SUPPORTED);
2184 }
2185 
2186 /*
2187  * Decrypt/Verify dual operation. Project-private entry point, not part of
2188  * the k-API.
2189  */
2190 /* ARGSUSED */
2191 int
2192 crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
2193     crypto_context_t verify_ctx, crypto_data_t *ciphertext,
2194     crypto_data_t *plaintext, crypto_call_req_t *crq)
2195 {
2196 	/*
2197 	 * RFE 4688647:
2198 	 * core functions needed by ioctl interface missing from impl.h
2199 	 */
2200 	return (CRYPTO_NOT_SUPPORTED);
2201 }
2202