xref: /titanic_51/usr/src/uts/common/crypto/api/kcf_dual.c (revision c8e880c1386b032ac975c61826ba3bc0d8dce5ac)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 #include <sys/errno.h>
29 #include <sys/types.h>
30 #include <sys/kmem.h>
31 #include <sys/sysmacros.h>
32 #include <sys/crypto/common.h>
33 #include <sys/crypto/impl.h>
34 #include <sys/crypto/api.h>
35 #include <sys/crypto/spi.h>
36 #include <sys/crypto/sched_impl.h>
37 
38 #define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
39 #define	CRYPTO_CIPHER_MAC_OFFSET(f) offsetof(crypto_dual_cipher_mac_ops_t, f)
40 
41 static int crypto_mac_decrypt_common(crypto_mechanism_t *,
42     crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
43     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
44     crypto_data_t *, crypto_call_req_t *, boolean_t);
45 
46 static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
47     crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
48     crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
49     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
50     crypto_data_t *, crypto_call_req_t *, boolean_t);
51 
/*
 * Performs an atomic dual encrypt/mac operation on an explicitly
 * specified provider and session.  If 'provider' is a logical provider,
 * an underlying hardware provider capable of both mechanisms is
 * selected from it.  Returns CRYPTO_SUCCESS or CRYPTO_QUEUED on
 * success; any other value is an error.  For asynchronous requests,
 * completion is notified through 'crq'.
 */
int
crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
    crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
    crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;

	/* The caller must hold a reference on the provider descriptor. */
	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * For a logical provider, pick an underlying hardware provider
	 * supporting both mechanisms.  On success real_provider is
	 * returned held; the hold is dropped at 'out' below.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type,
		    mac_mech->cm_type, CHECK_RESTRICT(crq), pd,
		    &real_provider, CRYPTO_FG_ENCRYPT_MAC_ATOMIC);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * it is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			/* A stale generation means the SW provider changed. */
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* The fast path for SW providers: call the SPI entry point directly. */
	if (CHECK_FASTPATH(crq, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		/* Translate the framework mech number to the provider's. */
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		/* Queue the request through the KCF scheduler. */
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	/* Drop the hold acquired by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
169 
/*
 * Performs a dual encrypt/mac atomic operation. The provider and session
 * to use are determined by the KCF dispatcher.  If no single provider
 * supports both mechanisms, the operation is emulated with two internal
 * calls: an encrypt step on the chosen provider followed by a
 * crypto_mac() step dispatched separately.
 */
int
crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_data_t *pt,
    crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;	/* providers already tried */
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	kcf_dual_req_t *next_req = NULL;	/* deferred MAC step, async emulation */

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
	    CHECK_RESTRICT(crq), ct->dd_len1);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		if (next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * it is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}
		encr_tmpl_checked = B_TRUE;
	}

	/*
	 * The chosen provider cannot do the MAC mechanism itself:
	 * emulate the dual operation with two internal calls.
	 */
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		crypto_call_req_t encr_req;

		/* Need to emulate with 2 internal calls */
		/* Allocate and initialize the MAC req for the callback */

		if (crq != NULL) {
			if (next_req == NULL) {
				next_req = kcf_alloc_req(crq);

				if (next_req == NULL) {
					KCF_PROV_REFRELE(pd);
					if (list != NULL)
						kcf_free_triedlist(list);
					return (CRYPTO_HOST_MEMORY);
				}
				/*
				 * Careful! we're wrapping-in mac_tmpl instead
				 * of an spi_mac_tmpl. The callback routine will
				 * have to validate mac_tmpl, and use the
				 * mac_ctx_tmpl, once it picks a MAC provider.
				 */
				KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
				    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
				    (crypto_data_t *)ct, mac, mac_tmpl);
			}

			/* Chain the MAC step as the encrypt completion. */
			encr_req.cr_flag = crq->cr_flag;
			encr_req.cr_callback_func = kcf_next_req;
			encr_req.cr_callback_arg = next_req;
		}

		/* A NULL 'pt' means in-place encryption into 'ct'. */
		if (pt == NULL) {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key,
			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
		} else {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key, pt,
			    (crypto_data_t *)ct, spi_encr_tmpl);
		}

		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
		    &encr_req, &params, B_TRUE);

		switch (error) {
		case CRYPTO_SUCCESS: {
			off_t saveoffset;
			size_t savelen;

			/*
			 * The encryption step is done. Reuse the encr_req
			 * for submitting the MAC step.
			 */
			if (next_req == NULL) {
				saveoffset = ct->dd_offset1;
				savelen = ct->dd_len1;
			} else {
				saveoffset = next_req->kr_saveoffset =
				    ct->dd_offset1;
				savelen = next_req->kr_savelen = ct->dd_len1;
				encr_req.cr_callback_func = kcf_last_req;
			}

			/* Point the primary span at the fresh ciphertext. */
			ct->dd_offset1 = ct->dd_offset2;
			ct->dd_len1 = ct->dd_len2;

			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
			    &encr_req);

			/*
			 * If queued, the callback restores the spans from
			 * kr_saveoffset/kr_savelen; otherwise restore here.
			 */
			if (error != CRYPTO_QUEUED) {
				ct->dd_offset1 = saveoffset;
				ct->dd_len1 = savelen;
			}
			break;
		}

		case CRYPTO_QUEUED:
			if ((crq != NULL) &&
			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
				crq->cr_reqid = encr_req.cr_reqid;
			break;

		default:

			/* Add pd to the linked list of providers tried. */
			if (IS_RECOVERABLE(error)) {
				if (kcf_insert_triedlist(&list, pd,
				    KCF_KMFLAG(crq)) != NULL)
					goto retry;
			}
		}
		/* next_req is still owned here unless the MAC step queued. */
		if (error != CRYPTO_QUEUED && next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		if (list != NULL)
			kcf_free_triedlist(list);
		KCF_PROV_REFRELE(pd);
		return (error);
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
	}

	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
	    IS_RECOVERABLE(error)) {
		/* Add pd to the linked list of providers tried. */
		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
			goto retry;
	}

	if (next_req != NULL)
		kmem_free(next_req, sizeof (kcf_dual_req_t));

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
414 
/*
 * Starts a multi-part dual encrypt/mac operation on an explicitly
 * specified provider and session.  On success, *ctxp receives the new
 * dual context.  If 'provider' is a logical provider, an underlying
 * hardware provider capable of both mechanisms is selected from it.
 */
int
crypto_encrypt_mac_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_ctx_t *ctx;
	kcf_context_t *encr_kcf_context = NULL;

	/* The caller must hold a reference on the provider descriptor. */
	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * For a logical provider, pick an underlying hardware provider;
	 * real_provider is returned held and released at 'out' below.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type,
		    mac_mech->cm_type, CHECK_RESTRICT(cr), pd, &real_provider,
		    CRYPTO_FG_ENCRYPT_MAC);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * it is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			/* A stale generation means the SW provider changed. */
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* Allocate the dual context that will carry the operation state. */
	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		/* Translate the framework mech number to the provider's. */
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	/* On failure, drop the context ref; on success hand it to the caller. */
	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	/* Drop the hold acquired by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
547 
/*
 * Starts a multi-part dual encrypt/mac operation. The provider and session
 * to use are determined by the KCF dispatcher.  If no single provider
 * supports both mechanisms, the operation is emulated with separate
 * cipher and MAC contexts chained through kc_secondctx.
 */
/* ARGSUSED */
int
crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;	/* providers already tried */
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	crypto_ctx_t *ctx = NULL;
	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
	crypto_call_flag_t save_flag;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC,
	    CHECK_RESTRICT(cr), 0);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * it is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}
		encr_tmpl_checked = B_TRUE;
	}

	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/*
		 * We avoid code complexity by limiting the pure async.
		 * case to be done using only a SW provider.
		 * XXX - Redo the emulation code below so that we can
		 * remove this limitation.
		 */
		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
			/* Try another provider before giving up. */
			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
			    != NULL))
				goto retry;
			if (list != NULL)
				kcf_free_triedlist(list);
			if (encr_kcf_context != NULL)
				KCF_CONTEXT_REFRELE(encr_kcf_context);
			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}

		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
			if (ctx == NULL) {
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			encr_kcf_context = (kcf_context_t *)
			    ctx->cc_framework_private;
		}
		/*
		 * Trade-off speed vs avoidance of code complexity and
		 * duplication:
		 * Could do all the combinations of fastpath / synch / asynch
		 * for the encryption and the mac steps. Early attempts
		 * showed the code grew wild and bug-prone, for little gain.
		 * Therefore, the adaptive asynch case is not implemented.
		 * It's either pure synchronous, or pure asynchronous.
		 * We still preserve a fastpath for the pure synchronous
		 * requests to SW providers.
		 */
		if (cr == NULL) {
			crypto_context_t mac_context;

			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
				crypto_mechanism_t lmech = *encr_mech;

				lmech.cm_type = prov_encr_mechid;

				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
				    encr_key, spi_encr_tmpl,
				    KCF_RHNDL(KM_SLEEP));
			} else {
				/*
				 * If we did the 'goto retry' then ctx may not
				 * be NULL.  In general, we can't reuse another
				 * provider's context, so we free it now so
				 * we don't leak it.
				 */
				if (ctx != NULL) {
					KCF_CONTEXT_REFRELE((kcf_context_t *)
					    ctx->cc_framework_private);
					encr_kcf_context = NULL;
				}
				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
				    encr_mech, encr_key, &encr_tmpl,
				    (crypto_context_t *)&ctx, NULL);

				if (error == CRYPTO_SUCCESS) {
					encr_kcf_context = (kcf_context_t *)
					    ctx->cc_framework_private;
				}
			}
			KCF_PROV_INCRSTATS(pd, error);

			KCF_PROV_REFRELE(pd);

			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED. return the failure */
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				return (error);
			}
			/* Initialize the separate MAC context. */
			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
			    &mac_context, NULL);

			if (list != NULL)
				kcf_free_triedlist(list);

			if (error != CRYPTO_SUCCESS) {
				/* Should this be an ASSERT() ? */

				KCF_CONTEXT_REFRELE(encr_kcf_context);
			} else {
				/*
				 * Chain the MAC context off the cipher
				 * context; a hold is taken on its behalf.
				 */
				encr_kcf_context = (kcf_context_t *)
				    ctx->cc_framework_private;
				mac_kcf_context = (kcf_context_t *)
				    ((crypto_ctx_t *)mac_context)->
				    cc_framework_private;

				encr_kcf_context->kc_secondctx =
				    mac_kcf_context;
				KCF_CONTEXT_REFHOLD(mac_kcf_context);

				*ctxp = (crypto_context_t)ctx;
			}

			return (error);
		}

		/* submit a pure asynchronous request. */
		save_flag = cr->cr_flag;
		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		/*
		 * cmops->em_encr_mech.cm_type will be set when we get to
		 * kcf_emulate_dual() routine.
		 */
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;

		/*
		 * cmops->em_mac_mech.cm_type will be set when we know the
		 * MAC provider.
		 */
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		/*
		 * non-NULL ctx->kc_secondctx tells common_submit_request
		 * that this request uses separate cipher and MAC contexts.
		 * That function will set ctx->kc_secondctx to the new
		 * MAC context, once it gets one.
		 */
		encr_kcf_context->kc_secondctx = encr_kcf_context;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);

		cr->cr_flag = save_flag;

		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
			KCF_CONTEXT_REFRELE(encr_kcf_context);
		}
		if (list != NULL)
			kcf_free_triedlist(list);
		*ctxp = (crypto_context_t)ctx;
		KCF_PROV_REFRELE(pd);
		return (error);
	}

	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}

	if (ctx == NULL) {
		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
		if (ctx == NULL) {
			if (list != NULL)
				kcf_free_triedlist(list);

			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}
		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
		/* Retry recoverable failures on another provider. */
		if ((IS_RECOVERABLE(error)) &&
		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
			goto retry;

		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
863 
864 /*
865  * Continues a multi-part dual encrypt/mac operation.
866  */
867 /* ARGSUSED */
868 int
869 crypto_encrypt_mac_update(crypto_context_t context,
870     crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
871 {
872 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
873 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
874 	kcf_provider_desc_t *pd;
875 	int error;
876 	kcf_req_params_t params;
877 
878 	if ((ctx == NULL) ||
879 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
880 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
881 		return (CRYPTO_INVALID_CONTEXT);
882 	}
883 
884 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
885 
886 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
887 		off_t save_offset;
888 		size_t save_len;
889 		crypto_call_flag_t save_flag;
890 
891 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
892 			error = CRYPTO_INVALID_CONTEXT;
893 			goto out;
894 		}
895 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
896 
897 		/* First we submit the encryption request */
898 		if (cr == NULL) {
899 			/*
900 			 * 'ct' is always not NULL.
901 			 * A NULL 'pt' means in-place.
902 			 */
903 			if (pt == NULL)
904 				error = crypto_encrypt_update(context,
905 				    (crypto_data_t *)ct, NULL, NULL);
906 			else
907 				error = crypto_encrypt_update(context, pt,
908 				    (crypto_data_t *)ct, NULL);
909 
910 			if (error != CRYPTO_SUCCESS)
911 				goto out;
912 
913 			/*
914 			 * call  mac_update when there is data to throw in
915 			 * the mix. Either an explicitly non-zero ct->dd_len2,
916 			 * or the last ciphertext portion.
917 			 */
918 			save_offset = ct->dd_offset1;
919 			save_len = ct->dd_len1;
920 			if (ct->dd_len2 == 0) {
921 				/*
922 				 * The previous encrypt step was an
923 				 * accumulation only and didn't produce any
924 				 * partial output
925 				 */
926 				if (ct->dd_len1 == 0)
927 					goto out;
928 			} else {
929 				ct->dd_offset1 = ct->dd_offset2;
930 				ct->dd_len1 = ct->dd_len2;
931 			}
932 			error = crypto_mac_update((crypto_context_t)mac_ctx,
933 			    (crypto_data_t *)ct, NULL);
934 
935 			ct->dd_offset1 = save_offset;
936 			ct->dd_len1 = save_len;
937 
938 			goto out;
939 		}
940 		/* submit a pure asynchronous request. */
941 		save_flag = cr->cr_flag;
942 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
943 
944 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
945 		    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL)
946 
947 
948 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
949 
950 		cr->cr_flag = save_flag;
951 		goto out;
952 	}
953 
954 	/* The fast path for SW providers. */
955 	if (CHECK_FASTPATH(cr, pd)) {
956 		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
957 		KCF_PROV_INCRSTATS(pd, error);
958 	} else {
959 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
960 		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);
961 
962 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
963 	}
964 out:
965 	return (error);
966 }
967 
968 /*
969  * Terminates a multi-part dual encrypt/mac operation.
970  */
971 /* ARGSUSED */
972 int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
973     crypto_data_t *mac, crypto_call_req_t *cr)
974 {
975 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
976 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
977 	kcf_provider_desc_t *pd;
978 	int error;
979 	kcf_req_params_t params;
980 
981 	if ((ctx == NULL) ||
982 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
983 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
984 		return (CRYPTO_INVALID_CONTEXT);
985 	}
986 
987 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
988 
989 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
990 		off_t save_offset;
991 		size_t save_len;
992 		crypto_context_t mac_context;
993 		crypto_call_flag_t save_flag;
994 
995 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
996 			return (CRYPTO_INVALID_CONTEXT);
997 		}
998 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
999 		mac_context = (crypto_context_t)mac_ctx;
1000 
1001 		if (cr == NULL) {
1002 			/* Get the last chunk of ciphertext */
1003 			error = crypto_encrypt_final(context,
1004 			    (crypto_data_t *)ct, NULL);
1005 
1006 			if (error != CRYPTO_SUCCESS)  {
1007 				/*
1008 				 * Needed here, because the caller of
1009 				 * crypto_encrypt_mac_final() lost all
1010 				 * refs to the mac_ctx.
1011 				 */
1012 				crypto_cancel_ctx(mac_context);
1013 				return (error);
1014 			}
1015 			if (ct->dd_len2 > 0) {
1016 				save_offset = ct->dd_offset1;
1017 				save_len = ct->dd_len1;
1018 				ct->dd_offset1 = ct->dd_offset2;
1019 				ct->dd_len1 = ct->dd_len2;
1020 
1021 				error = crypto_mac_update(mac_context,
1022 				    (crypto_data_t *)ct, NULL);
1023 
1024 				ct->dd_offset1 = save_offset;
1025 				ct->dd_len1 = save_len;
1026 
1027 				if (error != CRYPTO_SUCCESS)  {
1028 					crypto_cancel_ctx(mac_context);
1029 					return (error);
1030 				}
1031 			}
1032 
1033 			/* and finally, collect the MAC */
1034 			error = crypto_mac_final(mac_context, mac, NULL);
1035 
1036 			return (error);
1037 		}
1038 		/* submit a pure asynchronous request. */
1039 		save_flag = cr->cr_flag;
1040 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1041 
1042 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1043 		    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL)
1044 
1045 
1046 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1047 
1048 		cr->cr_flag = save_flag;
1049 		return (error);
1050 	}
1051 	/* The fast path for SW providers. */
1052 	if (CHECK_FASTPATH(cr, pd)) {
1053 		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
1054 		KCF_PROV_INCRSTATS(pd, error);
1055 	} else {
1056 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1057 		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);
1058 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1059 	}
1060 out:
1061 	/* Release the hold done in kcf_new_ctx() during init step. */
1062 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
1063 	return (error);
1064 }
1065 
1066 /*
1067  * Performs an atomic dual mac/decrypt operation. The provider to use
1068  * is determined by the KCF dispatcher.
1069  */
1070 int
1071 crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
1072     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1073     crypto_key_t *mac_key, crypto_key_t *decr_key,
1074     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1075     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1076 {
1077 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1078 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
1079 }
1080 
1081 int
1082 crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
1083     crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
1084     crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
1085     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1086     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1087 {
1088 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1089 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1090 	    crq, B_FALSE));
1091 }
1092 
1093 /*
1094  * Performs an atomic dual mac/decrypt operation. The provider to use
1095  * is determined by the KCF dispatcher. 'mac' specifies the expected
1096  * value for the MAC. The decryption is not performed if the computed
1097  * MAC does not match the expected MAC.
1098  */
1099 int
1100 crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
1101     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1102     crypto_key_t *mac_key, crypto_key_t *decr_key,
1103     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1104     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1105 {
1106 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1107 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
1108 }
1109 
1110 int
1111 crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
1112     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1113     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1114     crypto_key_t *mac_key, crypto_key_t *decr_key,
1115     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1116     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1117 {
1118 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1119 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1120 	    crq, B_TRUE));
1121 }
1122 
1123 /*
1124  * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
1125  * optionally verified if the MACs match before calling the decryption step.
1126  */
1127 static int
1128 crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
1129     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1130     crypto_key_t *mac_key, crypto_key_t *decr_key,
1131     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1132     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1133     boolean_t do_verify)
1134 {
1135 	/*
1136 	 * First try to find a provider for the decryption mechanism, that
1137 	 * is also capable of the MAC mechanism.
1138 	 * We still favor optimizing the costlier decryption.
1139 	 */
1140 	int error;
1141 	kcf_mech_entry_t *me;
1142 	kcf_provider_desc_t *pd;
1143 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1144 	kcf_req_params_t params;
1145 	kcf_mac_decrypt_ops_params_t *cmops;
1146 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1147 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1148 	kcf_prov_tried_t *list = NULL;
1149 	boolean_t decr_tmpl_checked = B_FALSE;
1150 	boolean_t mac_tmpl_checked = B_FALSE;
1151 	kcf_dual_req_t *next_req = NULL;
1152 	crypto_call_req_t mac_req, *mac_reqp = NULL;
1153 
1154 retry:
1155 	/* pd is returned held on success */
1156 	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
1157 	    &prov_mac_mechid, &error, list,
1158 	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1159 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1160 	    CHECK_RESTRICT(crq), ct->dd_len2);
1161 	if (pd == NULL) {
1162 		if (list != NULL)
1163 			kcf_free_triedlist(list);
1164 		if (next_req != NULL)
1165 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1166 		return (CRYPTO_MECH_NOT_SUPPORTED);
1167 	}
1168 
1169 	/*
1170 	 * For SW providers, check the validity of the context template
1171 	 * It is very rare that the generation number mis-matches, so
1172 	 * is acceptable to fail here, and let the consumer recover by
1173 	 * freeing this tmpl and create a new one for the key and new SW
1174 	 * provider
1175 	 */
1176 
1177 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1178 		if (decr_tmpl != NULL) {
1179 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1180 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1181 				if (next_req != NULL)
1182 					kmem_free(next_req,
1183 					    sizeof (kcf_dual_req_t));
1184 				if (list != NULL)
1185 					kcf_free_triedlist(list);
1186 				KCF_PROV_REFRELE(pd);
1187 
1188 				/* Which one is the the old one ? */
1189 				return (CRYPTO_OLD_CTX_TEMPLATE);
1190 			}
1191 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1192 		}
1193 		decr_tmpl_checked = B_TRUE;
1194 	}
1195 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1196 		/* Need to emulate with 2 internal calls */
1197 
1198 		/* Prepare the call_req to be submitted for the MAC step */
1199 
1200 		if (crq != NULL) {
1201 
1202 			if (next_req == NULL) {
1203 				/*
1204 				 * allocate, initialize and prepare the
1205 				 * params for the next step only in the
1206 				 * first pass (not on every retry).
1207 				 */
1208 				next_req = kcf_alloc_req(crq);
1209 
1210 				if (next_req == NULL) {
1211 					KCF_PROV_REFRELE(pd);
1212 					if (list != NULL)
1213 						kcf_free_triedlist(list);
1214 					return (CRYPTO_HOST_MEMORY);
1215 				}
1216 				KCF_WRAP_DECRYPT_OPS_PARAMS(
1217 				    &(next_req->kr_params), KCF_OP_ATOMIC,
1218 				    NULL, decr_mech, decr_key,
1219 				    (crypto_data_t *)ct, pt, spi_decr_tmpl);
1220 			}
1221 
1222 			mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
1223 			mac_req.cr_flag |= CRYPTO_SETDUAL;
1224 			mac_req.cr_callback_func = kcf_next_req;
1225 			mac_req.cr_callback_arg = next_req;
1226 			mac_reqp = &mac_req;
1227 		}
1228 
1229 		/* 'pd' is the decryption provider. */
1230 
1231 		if (do_verify)
1232 			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
1233 			    mac_key, mac_tmpl, mac,
1234 			    (crq == NULL) ? NULL : mac_reqp);
1235 		else
1236 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
1237 			    mac_key, mac_tmpl, mac,
1238 			    (crq == NULL) ? NULL : mac_reqp);
1239 
1240 		switch (error) {
1241 		case CRYPTO_SUCCESS: {
1242 			off_t saveoffset;
1243 			size_t savelen;
1244 
1245 			if (next_req == NULL) {
1246 				saveoffset = ct->dd_offset1;
1247 				savelen = ct->dd_len1;
1248 			} else {
1249 				saveoffset = next_req->kr_saveoffset =
1250 				    ct->dd_offset1;
1251 				savelen = next_req->kr_savelen = ct->dd_len1;
1252 
1253 				ASSERT(mac_reqp != NULL);
1254 				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
1255 				mac_req.cr_callback_func = kcf_last_req;
1256 			}
1257 			ct->dd_offset1 = ct->dd_offset2;
1258 			ct->dd_len1 = ct->dd_len2;
1259 
1260 			if (CHECK_FASTPATH(crq, pd)) {
1261 				crypto_mechanism_t lmech;
1262 
1263 				lmech = *decr_mech;
1264 				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
1265 				    pd, &lmech);
1266 
1267 				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
1268 				    &lmech, decr_key, (crypto_data_t *)ct,
1269 				    (crypto_data_t *)pt, spi_decr_tmpl,
1270 				    KCF_SWFP_RHNDL(mac_reqp));
1271 
1272 				KCF_PROV_INCRSTATS(pd, error);
1273 			} else {
1274 				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
1275 				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
1276 				    decr_key, (crypto_data_t *)ct, pt,
1277 				    spi_decr_tmpl);
1278 
1279 				error = kcf_submit_request(pd, NULL,
1280 				    (crq == NULL) ? NULL : mac_reqp,
1281 				    &params, B_FALSE);
1282 			}
1283 			if (error != CRYPTO_QUEUED) {
1284 				KCF_PROV_INCRSTATS(pd, error);
1285 				ct->dd_offset1 = saveoffset;
1286 				ct->dd_len1 = savelen;
1287 			}
1288 			break;
1289 		}
1290 
1291 		case CRYPTO_QUEUED:
1292 			if ((crq != NULL) && (crq->cr_flag & CRYPTO_SKIP_REQID))
1293 				crq->cr_reqid = mac_req.cr_reqid;
1294 			break;
1295 
1296 		default:
1297 			if (IS_RECOVERABLE(error)) {
1298 				if (kcf_insert_triedlist(&list, pd,
1299 				    KCF_KMFLAG(crq)) != NULL)
1300 					goto retry;
1301 			}
1302 		}
1303 		if (error != CRYPTO_QUEUED && next_req != NULL)
1304 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1305 		if (list != NULL)
1306 			kcf_free_triedlist(list);
1307 		KCF_PROV_REFRELE(pd);
1308 		return (error);
1309 	}
1310 
1311 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1312 		if ((mac_tmpl != NULL) &&
1313 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1314 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1315 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1316 				if (next_req != NULL)
1317 					kmem_free(next_req,
1318 					    sizeof (kcf_dual_req_t));
1319 				if (list != NULL)
1320 					kcf_free_triedlist(list);
1321 				KCF_PROV_REFRELE(pd);
1322 
1323 				/* Which one is the the old one ? */
1324 				return (CRYPTO_OLD_CTX_TEMPLATE);
1325 			}
1326 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1327 		}
1328 		mac_tmpl_checked = B_TRUE;
1329 	}
1330 
1331 	/* The fast path for SW providers. */
1332 	if (CHECK_FASTPATH(crq, pd)) {
1333 		crypto_mechanism_t lmac_mech;
1334 		crypto_mechanism_t ldecr_mech;
1335 
1336 		/* careful! structs assignments */
1337 		ldecr_mech = *decr_mech;
1338 		ldecr_mech.cm_type = prov_decr_mechid;
1339 		lmac_mech = *mac_mech;
1340 		lmac_mech.cm_type = prov_mac_mechid;
1341 
1342 		if (do_verify)
1343 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
1344 			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
1345 			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1346 			    KCF_SWFP_RHNDL(crq));
1347 		else
1348 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
1349 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1350 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1351 			    KCF_SWFP_RHNDL(crq));
1352 
1353 		KCF_PROV_INCRSTATS(pd, error);
1354 	} else {
1355 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1356 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1357 		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
1358 		    spi_mac_tmpl, spi_decr_tmpl);
1359 
1360 		cmops = &(params.rp_u.mac_decrypt_params);
1361 
1362 		/* careful! structs assignments */
1363 		cmops->md_decr_mech = *decr_mech;
1364 		cmops->md_decr_mech.cm_type = prov_decr_mechid;
1365 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1366 		cmops->md_mac_mech = *mac_mech;
1367 		cmops->md_mac_mech.cm_type = prov_mac_mechid;
1368 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1369 
1370 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
1371 	}
1372 
1373 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
1374 	    IS_RECOVERABLE(error)) {
1375 		/* Add pd to the linked list of providers tried. */
1376 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
1377 			goto retry;
1378 	}
1379 
1380 	if (list != NULL)
1381 		kcf_free_triedlist(list);
1382 
1383 	if (next_req != NULL)
1384 		kmem_free(next_req, sizeof (kcf_dual_req_t));
1385 	KCF_PROV_REFRELE(pd);
1386 	return (error);
1387 }
1388 
/*
 * Common implementation for crypto_mac_decrypt_prov() and
 * crypto_mac_verify_decrypt_prov(): atomic dual mac/decrypt on a
 * caller-specified provider and session.  When 'do_verify' is B_TRUE,
 * 'mac' holds the expected MAC value; otherwise the computed MAC is
 * returned through 'mac'.  A logical provider is redirected to a real
 * hardware provider before dispatching.
 */
static int
crypto_mac_decrypt_common_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
    boolean_t do_verify)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * Resolve a logical provider to a real HW provider capable of the
	 * dual operation; real_provider is returned held in that case.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		error = kcf_get_hardware_provider(decr_mech->cm_type,
		    mac_mech->cm_type, CHECK_RESTRICT(crq), pd,
		    &real_provider, CRYPTO_FG_MAC_DECRYPT_ATOMIC);

		if (error != CRYPTO_SUCCESS)
			return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lmac_mech;
		crypto_mechanism_t ldecr_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		if (do_verify)
			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
			    real_provider, sid, &lmac_mech, mac_key,
			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
		else
			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
		    spi_mac_tmpl, spi_decr_tmpl);

		cmops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		cmops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &cmops->md_decr_mech);
		cmops->md_framework_decr_mechtype = decr_mech->cm_type;

		cmops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->md_mac_mech);
		cmops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	/* Release the hold taken by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (error);
}
1515 
1516 /*
1517  * Starts a multi-part dual mac/decrypt operation. The provider to
1518  * use is determined by the KCF dispatcher.
1519  */
1520 /* ARGSUSED */
1521 int
1522 crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
1523     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1524     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1525     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1526     crypto_call_req_t *cr)
1527 {
1528 	/*
1529 	 * First try to find a provider for the decryption mechanism, that
1530 	 * is also capable of the MAC mechanism.
1531 	 * We still favor optimizing the costlier decryption.
1532 	 */
1533 	int error;
1534 	kcf_mech_entry_t *me;
1535 	kcf_provider_desc_t *pd;
1536 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1537 	kcf_req_params_t params;
1538 	kcf_mac_decrypt_ops_params_t *mdops;
1539 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1540 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1541 	kcf_prov_tried_t *list = NULL;
1542 	boolean_t decr_tmpl_checked = B_FALSE;
1543 	boolean_t mac_tmpl_checked = B_FALSE;
1544 	crypto_ctx_t *ctx = NULL;
1545 	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
1546 	crypto_call_flag_t save_flag;
1547 
1548 retry:
1549 	/* pd is returned held on success */
1550 	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
1551 	    &prov_mac_mechid, &error, list,
1552 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC,
1553 	    CHECK_RESTRICT(cr), 0);
1554 	if (pd == NULL) {
1555 		if (list != NULL)
1556 			kcf_free_triedlist(list);
1557 		return (error);
1558 	}
1559 
1560 	/*
1561 	 * For SW providers, check the validity of the context template
1562 	 * It is very rare that the generation number mis-matches, so
1563 	 * is acceptable to fail here, and let the consumer recover by
1564 	 * freeing this tmpl and create a new one for the key and new SW
1565 	 * provider
1566 	 * Warning! will need to change when multiple software providers
1567 	 * per mechanism are supported.
1568 	 */
1569 
1570 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1571 		if (decr_tmpl != NULL) {
1572 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1573 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1574 
1575 				if (list != NULL)
1576 					kcf_free_triedlist(list);
1577 				if (decr_kcf_context != NULL)
1578 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1579 
1580 				KCF_PROV_REFRELE(pd);
1581 				/* Which one is the the old one ? */
1582 				return (CRYPTO_OLD_CTX_TEMPLATE);
1583 			}
1584 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1585 		}
1586 		decr_tmpl_checked = B_TRUE;
1587 	}
1588 
1589 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1590 		/* Need to emulate with 2 internal calls */
1591 
1592 		/*
1593 		 * We avoid code complexity by limiting the pure async.
1594 		 * case to be done using only a SW provider.
1595 		 * XXX - Redo the emulation code below so that we can
1596 		 * remove this limitation.
1597 		 */
1598 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
1599 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
1600 			    != NULL))
1601 				goto retry;
1602 			if (list != NULL)
1603 				kcf_free_triedlist(list);
1604 			if (decr_kcf_context != NULL)
1605 				KCF_CONTEXT_REFRELE(decr_kcf_context);
1606 			KCF_PROV_REFRELE(pd);
1607 			return (CRYPTO_HOST_MEMORY);
1608 		}
1609 
1610 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1611 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1612 			if (ctx == NULL) {
1613 				if (list != NULL)
1614 					kcf_free_triedlist(list);
1615 				if (decr_kcf_context != NULL)
1616 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1617 				KCF_PROV_REFRELE(pd);
1618 				return (CRYPTO_HOST_MEMORY);
1619 			}
1620 			decr_kcf_context = (kcf_context_t *)
1621 			    ctx->cc_framework_private;
1622 		}
1623 		/*
1624 		 * Trade-off speed vs avoidance of code complexity and
1625 		 * duplication:
1626 		 * Could do all the combinations of fastpath / synch / asynch
1627 		 * for the decryption and the mac steps. Early attempts
1628 		 * showed the code grew wild and bug-prone, for little gain.
1629 		 * Therefore, the adaptative asynch case is not implemented.
1630 		 * It's either pure synchronous, or pure asynchronous.
1631 		 * We still preserve a fastpath for the pure synchronous
1632 		 * requests to SW providers.
1633 		 */
1634 		if (cr == NULL) {
1635 			crypto_context_t mac_context;
1636 
1637 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
1638 			    &mac_context, NULL);
1639 
1640 			if (error != CRYPTO_SUCCESS) {
1641 				/* Can't be CRYPTO_QUEUED. return the failure */
1642 				if (list != NULL)
1643 					kcf_free_triedlist(list);
1644 
1645 				if (decr_kcf_context != NULL)
1646 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1647 				return (error);
1648 			}
1649 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1650 				crypto_mechanism_t lmech = *decr_mech;
1651 
1652 				lmech.cm_type = prov_decr_mechid;
1653 
1654 				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
1655 				    decr_key, spi_decr_tmpl,
1656 				    KCF_RHNDL(KM_SLEEP));
1657 			} else {
1658 				/*
1659 				 * If we did the 'goto retry' then ctx may not
1660 				 * be NULL.  In general, we can't reuse another
1661 				 * provider's context, so we free it now so
1662 				 * we don't leak it.
1663 				 */
1664 				if (ctx != NULL) {
1665 					KCF_CONTEXT_REFRELE((kcf_context_t *)
1666 					    ctx->cc_framework_private);
1667 					decr_kcf_context = NULL;
1668 				}
1669 				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
1670 				    decr_mech, decr_key, &decr_tmpl,
1671 				    (crypto_context_t *)&ctx, NULL);
1672 
1673 				if (error == CRYPTO_SUCCESS) {
1674 					decr_kcf_context = (kcf_context_t *)
1675 					    ctx->cc_framework_private;
1676 				}
1677 			}
1678 
1679 			KCF_PROV_INCRSTATS(pd, error);
1680 
1681 			KCF_PROV_REFRELE(pd);
1682 
1683 			if (error != CRYPTO_SUCCESS) {
1684 				/* Can't be CRYPTO_QUEUED. return the failure */
1685 				if (list != NULL)
1686 					kcf_free_triedlist(list);
1687 				if (mac_kcf_context != NULL)
1688 					KCF_CONTEXT_REFRELE(mac_kcf_context);
1689 
1690 				return (error);
1691 			}
1692 			mac_kcf_context = (kcf_context_t *)
1693 			    ((crypto_ctx_t *)mac_context)->
1694 			    cc_framework_private;
1695 
1696 			decr_kcf_context = (kcf_context_t *)
1697 			    ctx->cc_framework_private;
1698 
1699 			/*
1700 			 * Here also, the mac context is second. The callback
1701 			 * case can't overwrite the context returned to
1702 			 * the caller.
1703 			 */
1704 			decr_kcf_context->kc_secondctx = mac_kcf_context;
1705 			KCF_CONTEXT_REFHOLD(mac_kcf_context);
1706 
1707 			*ctxp = (crypto_context_t)ctx;
1708 
1709 			return (error);
1710 		}
1711 		/* submit a pure asynchronous request. */
1712 		save_flag = cr->cr_flag;
1713 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1714 
1715 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1716 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1717 		    spi_mac_tmpl, spi_decr_tmpl);
1718 
1719 		mdops = &(params.rp_u.mac_decrypt_params);
1720 
1721 		/* careful! structs assignments */
1722 		mdops->md_decr_mech = *decr_mech;
1723 		/*
1724 		 * mdops->md_decr_mech.cm_type will be set when we get to
1725 		 * kcf_emulate_dual() routine.
1726 		 */
1727 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1728 		mdops->md_mac_mech = *mac_mech;
1729 
1730 		/*
1731 		 * mdops->md_mac_mech.cm_type will be set when we know the
1732 		 * MAC provider.
1733 		 */
1734 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1735 
1736 		/*
1737 		 * non-NULL ctx->kc_secondctx tells common_submit_request
1738 		 * that this request uses separate cipher and MAC contexts.
1739 		 * That function will set the MAC context's kc_secondctx to
1740 		 * this decrypt context.
1741 		 */
1742 		decr_kcf_context->kc_secondctx = decr_kcf_context;
1743 
1744 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1745 
1746 		cr->cr_flag = save_flag;
1747 
1748 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1749 			KCF_CONTEXT_REFRELE(decr_kcf_context);
1750 		}
1751 		if (list != NULL)
1752 			kcf_free_triedlist(list);
1753 		*ctxp =  ctx;
1754 		KCF_PROV_REFRELE(pd);
1755 		return (error);
1756 	}
1757 
1758 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1759 		if ((mac_tmpl != NULL) &&
1760 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1761 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1762 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1763 
1764 				if (list != NULL)
1765 					kcf_free_triedlist(list);
1766 
1767 				KCF_PROV_REFRELE(pd);
1768 				/* Which one is the the old one ? */
1769 				return (CRYPTO_OLD_CTX_TEMPLATE);
1770 			}
1771 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1772 		}
1773 		mac_tmpl_checked = B_TRUE;
1774 	}
1775 
1776 	if (ctx == NULL) {
1777 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1778 		if (ctx == NULL) {
1779 			error = CRYPTO_HOST_MEMORY;
1780 			if (list != NULL)
1781 				kcf_free_triedlist(list);
1782 			return (CRYPTO_HOST_MEMORY);
1783 		}
1784 		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1785 	}
1786 
1787 	/* The fast path for SW providers. */
1788 	if (CHECK_FASTPATH(cr, pd)) {
1789 		crypto_mechanism_t ldecr_mech;
1790 		crypto_mechanism_t lmac_mech;
1791 
1792 		/* careful! structs assignments */
1793 		ldecr_mech = *decr_mech;
1794 		ldecr_mech.cm_type = prov_decr_mechid;
1795 		lmac_mech = *mac_mech;
1796 		lmac_mech.cm_type = prov_mac_mechid;
1797 
1798 		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
1799 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1800 		    KCF_SWFP_RHNDL(cr));
1801 
1802 		KCF_PROV_INCRSTATS(pd, error);
1803 	} else {
1804 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1805 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1806 		    spi_mac_tmpl, spi_decr_tmpl);
1807 
1808 		mdops = &(params.rp_u.mac_decrypt_params);
1809 
1810 		/* careful! structs assignments */
1811 		mdops->md_decr_mech = *decr_mech;
1812 		mdops->md_decr_mech.cm_type = prov_decr_mechid;
1813 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1814 		mdops->md_mac_mech = *mac_mech;
1815 		mdops->md_mac_mech.cm_type = prov_mac_mechid;
1816 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1817 
1818 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1819 	}
1820 
1821 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1822 		if ((IS_RECOVERABLE(error)) &&
1823 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
1824 			goto retry;
1825 
1826 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1827 	} else
1828 		*ctxp = (crypto_context_t)ctx;
1829 
1830 	if (list != NULL)
1831 		kcf_free_triedlist(list);
1832 
1833 	KCF_PROV_REFRELE(pd);
1834 	return (error);
1835 }
1836 
/*
 * Starts a dual mac/decrypt operation on the provider specified by the
 * caller ('provider'), which may be a logical provider.  On success a
 * new framework context is returned through 'ctxp'.  Returns
 * CRYPTO_SUCCESS or CRYPTO_QUEUED on success, an error code otherwise.
 * The caller must already hold a reference on 'provider' (asserted
 * below); this routine does not consume that reference.
 */
int
crypto_mac_decrypt_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
    crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *mdops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_ctx_t *ctx;
	kcf_context_t *decr_kcf_context = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * Resolve a logical provider to a real hardware provider that can
	 * do the combined MAC_DECRYPT.  The reference obtained on
	 * real_provider here is matched by the REFRELE at 'out' below.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(decr_mech->cm_type,
		    mac_mech->cm_type, CHECK_RESTRICT(cr), pd, &real_provider,
		    CRYPTO_FG_MAC_DECRYPT);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/*
	 * Allocate the framework context.  The hold taken by kcf_new_ctx()
	 * is dropped via KCF_CONTEXT_REFRELE below if the init fails.
	 */
	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t ldecr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
		    KCF_SWFP_RHNDL(cr));

		/*
		 * NOTE(review): stats are bumped on 'pd', not on
		 * 'real_provider' -- confirm this is intended when pd is a
		 * logical provider.
		 */
		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		/* Queue the request; mechanisms are remapped per-provider. */
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, mac_key, decr_key, NULL, NULL, NULL,
		    spi_mac_tmpl, spi_decr_tmpl);

		mdops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		mdops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &mdops->md_decr_mech);
		mdops->md_framework_decr_mechtype = decr_mech->cm_type;

		mdops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &mdops->md_mac_mech);
		mdops->md_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	/* On failure drop the context; on success hand it to the caller. */
	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(decr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	/* Matches the hold obtained from kcf_get_hardware_provider(). */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
1970 /*
1971  * Continues a multi-part dual mac/decrypt operation.
1972  */
1973 /* ARGSUSED */
1974 int
1975 crypto_mac_decrypt_update(crypto_context_t context,
1976     crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
1977 {
1978 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
1979 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
1980 	kcf_provider_desc_t *pd;
1981 	int error;
1982 	kcf_req_params_t params;
1983 
1984 	if ((ctx == NULL) ||
1985 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
1986 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
1987 		return (CRYPTO_INVALID_CONTEXT);
1988 	}
1989 
1990 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
1991 
1992 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
1993 		off_t save_offset;
1994 		size_t save_len;
1995 		crypto_call_flag_t save_flag;
1996 
1997 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
1998 			error = CRYPTO_INVALID_CONTEXT;
1999 			goto out;
2000 		}
2001 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2002 
2003 		/* First we submit the MAC request */
2004 		if (cr == NULL) {
2005 			/*
2006 			 * 'ct' is always not NULL.
2007 			 */
2008 			error = crypto_mac_update((crypto_context_t)mac_ctx,
2009 			    (crypto_data_t *)ct, NULL);
2010 
2011 			if (error != CRYPTO_SUCCESS)
2012 				goto out;
2013 
2014 			/* Decrypt a different length only when told so */
2015 
2016 			save_offset = ct->dd_offset1;
2017 			save_len = ct->dd_len1;
2018 
2019 			if (ct->dd_len2 > 0) {
2020 				ct->dd_offset1 = ct->dd_offset2;
2021 				ct->dd_len1 = ct->dd_len2;
2022 			}
2023 
2024 			error = crypto_decrypt_update(context,
2025 			    (crypto_data_t *)ct, pt, NULL);
2026 
2027 			ct->dd_offset1 = save_offset;
2028 			ct->dd_len1 = save_len;
2029 
2030 			goto out;
2031 		}
2032 		/* submit a pure asynchronous request. */
2033 		save_flag = cr->cr_flag;
2034 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2035 
2036 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2037 		    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL)
2038 
2039 
2040 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2041 
2042 		cr->cr_flag = save_flag;
2043 		goto out;
2044 	}
2045 
2046 	/* The fast path for SW providers. */
2047 	if (CHECK_FASTPATH(cr, pd)) {
2048 		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
2049 		KCF_PROV_INCRSTATS(pd, error);
2050 	} else {
2051 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2052 		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);
2053 
2054 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2055 	}
2056 out:
2057 	return (error);
2058 }
2059 
2060 /*
2061  * Terminates a multi-part dual mac/decrypt operation.
2062  */
2063 /* ARGSUSED */
2064 int
2065 crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
2066     crypto_data_t *pt, crypto_call_req_t *cr)
2067 {
2068 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
2069 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
2070 	kcf_provider_desc_t *pd;
2071 	int error;
2072 	kcf_req_params_t params;
2073 
2074 	if ((ctx == NULL) ||
2075 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
2076 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
2077 		return (CRYPTO_INVALID_CONTEXT);
2078 	}
2079 
2080 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
2081 
2082 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
2083 		crypto_call_flag_t save_flag;
2084 
2085 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2086 			error = CRYPTO_INVALID_CONTEXT;
2087 			goto out;
2088 		}
2089 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2090 
2091 		/* First we collect the MAC */
2092 		if (cr == NULL) {
2093 
2094 			error = crypto_mac_final((crypto_context_t)mac_ctx,
2095 			    mac, NULL);
2096 
2097 			if (error != CRYPTO_SUCCESS) {
2098 				crypto_cancel_ctx(ctx);
2099 			} else {
2100 				/* Get the last chunk of plaintext */
2101 				error = crypto_decrypt_final(context, pt, NULL);
2102 			}
2103 
2104 			return (error);
2105 		}
2106 		/* submit a pure asynchronous request. */
2107 		save_flag = cr->cr_flag;
2108 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2109 
2110 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2111 		    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL)
2112 
2113 
2114 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2115 
2116 		cr->cr_flag = save_flag;
2117 
2118 		return (error);
2119 	}
2120 
2121 	/* The fast path for SW providers. */
2122 	if (CHECK_FASTPATH(cr, pd)) {
2123 		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
2124 		KCF_PROV_INCRSTATS(pd, error);
2125 	} else {
2126 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2127 		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);
2128 
2129 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2130 	}
2131 out:
2132 	/* Release the hold done in kcf_new_ctx() during init step. */
2133 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
2134 	return (error);
2135 }
2136 
2137 /*
2138  * Digest/Encrypt dual operation. Project-private entry point, not part of
2139  * the k-API.
2140  */
2141 /* ARGSUSED */
2142 int
2143 crypto_digest_encrypt_update(crypto_context_t digest_ctx,
2144     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2145     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2146 {
2147 	/*
2148 	 * RFE 4688647:
2149 	 * core functions needed by ioctl interface missing from impl.h
2150 	 */
2151 	return (CRYPTO_NOT_SUPPORTED);
2152 }
2153 
2154 /*
2155  * Decrypt/Digest dual operation. Project-private entry point, not part of
2156  * the k-API.
2157  */
2158 /* ARGSUSED */
2159 int
2160 crypto_decrypt_digest_update(crypto_context_t decryptctx,
2161     crypto_context_t encrypt_ctx, crypto_data_t *ciphertext,
2162     crypto_data_t *plaintext, crypto_call_req_t *crq)
2163 {
2164 	/*
2165 	 * RFE 4688647:
2166 	 * core functions needed by ioctl interface missing from impl.h
2167 	 */
2168 	return (CRYPTO_NOT_SUPPORTED);
2169 }
2170 
2171 /*
2172  * Sign/Encrypt dual operation. Project-private entry point, not part of
2173  * the k-API.
2174  */
2175 /* ARGSUSED */
2176 int
2177 crypto_sign_encrypt_update(crypto_context_t sign_ctx,
2178     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2179     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2180 {
2181 	/*
2182 	 * RFE 4688647:
2183 	 * core functions needed by ioctl interface missing from impl.h
2184 	 */
2185 	return (CRYPTO_NOT_SUPPORTED);
2186 }
2187 
2188 /*
2189  * Decrypt/Verify dual operation. Project-private entry point, not part of
2190  * the k-API.
2191  */
2192 /* ARGSUSED */
2193 int
2194 crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
2195     crypto_context_t verify_ctx, crypto_data_t *ciphertext,
2196     crypto_data_t *plaintext, crypto_call_req_t *crq)
2197 {
2198 	/*
2199 	 * RFE 4688647:
2200 	 * core functions needed by ioctl interface missing from impl.h
2201 	 */
2202 	return (CRYPTO_NOT_SUPPORTED);
2203 }
2204