xref: /illumos-gate/usr/src/uts/common/crypto/api/kcf_dual.c (revision a07094369b21309434206d9b3601d162693466fc)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License, Version 1.0 only
6  * (the "License").  You may not use this file except in compliance
7  * with the License.
8  *
9  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10  * or http://www.opensolaris.org/os/licensing.
11  * See the License for the specific language governing permissions
12  * and limitations under the License.
13  *
14  * When distributing Covered Code, include this CDDL HEADER in each
15  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16  * If applicable, add the following below this CDDL HEADER, with the
17  * fields enclosed by brackets "[]" replaced with your own identifying
18  * information: Portions Copyright [yyyy] [name of copyright owner]
19  *
20  * CDDL HEADER END
21  */
22 /*
23  * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #pragma ident	"%Z%%M%	%I%	%E% SMI"
28 
29 #include <sys/errno.h>
30 #include <sys/types.h>
31 #include <sys/kmem.h>
32 #include <sys/sysmacros.h>
33 #include <sys/crypto/common.h>
34 #include <sys/crypto/impl.h>
35 #include <sys/crypto/api.h>
36 #include <sys/crypto/spi.h>
37 #include <sys/crypto/sched_impl.h>
38 
39 #define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
40 #define	CRYPTO_CIPHER_MAC_OFFSET(f) offsetof(crypto_dual_cipher_mac_ops_t, f)
41 
42 static int crypto_mac_decrypt_common(crypto_mechanism_t *,
43     crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
44     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
45     crypto_data_t *, crypto_call_req_t *, boolean_t);
46 
47 static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
48     crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
49     crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
50     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
51     crypto_data_t *, crypto_call_req_t *, boolean_t);
52 
/*
 * Performs a dual encrypt/mac atomic operation on the provider specified
 * by the caller, in session 'sid'.  'crq' is the asynchronous call request
 * (NULL means the call is synchronous).  On success the ciphertext is
 * placed in 'ct' and the MAC in 'mac' by the provider's atomic entry point.
 */
int
crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
    crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
    crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * A logical provider cannot perform the operation itself; resolve
	 * it to a hardware member that supports both mechanisms.  On
	 * success real_provider is returned held and is released at 'out'.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type,
		    mac_mech->cm_type, CRYPTO_OPS_OFFSET(dual_cipher_mac_ops),
		    CRYPTO_CIPHER_MAC_OFFSET(encrypt_mac_atomic),
		    CHECK_RESTRICT(crq), pd, &real_provider);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			/* A stale template means the SW provider changed. */
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			/* A stale template means the SW provider changed. */
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		/* Call the provider's atomic entry point directly. */
		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		/* Queue the request through the KCF scheduler. */
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	/*
	 * Drop the hold obtained by kcf_get_hardware_provider() above;
	 * the caller's hold on 'pd' itself is not released here.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
171 
/*
 * Performs a dual encrypt/mac atomic operation. The provider and session
 * to use are determined by the KCF dispatcher.
 */
int
crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_data_t *pt,
    crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;	/* providers already tried */
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	kcf_dual_req_t *next_req = NULL; /* chained MAC step, emulation only */

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
	    CHECK_RESTRICT(crq), ct->dd_len1);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		if (next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}
		encr_tmpl_checked = B_TRUE;
	}

	/*
	 * The chosen provider does not support the MAC mechanism:
	 * emulate the dual operation with two separate internal calls,
	 * encrypt on 'pd' followed by crypto_mac() on the ciphertext.
	 */
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		crypto_call_req_t encr_req;

		/* Need to emulate with 2 internal calls */
		/* Allocate and initialize the MAC req for the callback */

		if (crq != NULL) {
			if (next_req == NULL) {
				next_req = kcf_alloc_req(crq);

				if (next_req == NULL) {
					KCF_PROV_REFRELE(pd);
					if (list != NULL)
						kcf_free_triedlist(list);
					return (CRYPTO_HOST_MEMORY);
				}
				/*
				 * Careful! we're wrapping-in mac_tmpl instead
				 * of an spi_mac_tmpl. The callback routine will
				 * have to validate mac_tmpl, and use the
				 * mac_ctx_tmpl, once it picks a MAC provider.
				 */
				KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
				    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
				    (crypto_data_t *)ct, mac, mac_tmpl);
			}

			/* The encrypt step completes into kcf_next_req. */
			encr_req.cr_flag = crq->cr_flag;
			encr_req.cr_callback_func = kcf_next_req;
			encr_req.cr_callback_arg = next_req;
		}

		/* A NULL 'pt' means in-place encryption into 'ct'. */
		if (pt == NULL) {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key,
			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
		} else {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key, pt,
			    (crypto_data_t *)ct, spi_encr_tmpl);
		}

		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
		    &encr_req, &params, B_TRUE);

		switch (error) {
		case CRYPTO_SUCCESS: {
			off_t saveoffset;
			size_t savelen;

			/*
			 * The encryption step is done. Reuse the encr_req
			 * for submitting the MAC step.
			 */
			if (next_req == NULL) {
				saveoffset = ct->dd_offset1;
				savelen = ct->dd_len1;
			} else {
				saveoffset = next_req->kr_saveoffset =
				    ct->dd_offset1;
				savelen = next_req->kr_savelen = ct->dd_len1;
				encr_req.cr_callback_func = kcf_last_req;
			}

			/*
			 * Temporarily point the primary region of 'ct' at
			 * the ciphertext (region 2) so crypto_mac() digests
			 * it; restored below unless the request was queued.
			 */
			ct->dd_offset1 = ct->dd_offset2;
			ct->dd_len1 = ct->dd_len2;

			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
			    &encr_req);

			if (error != CRYPTO_QUEUED) {
				ct->dd_offset1 = saveoffset;
				ct->dd_len1 = savelen;
			}
			break;
		}

		case CRYPTO_QUEUED:
			if ((crq != NULL) &&
			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
				crq->cr_reqid = encr_req.cr_reqid;
			break;

		default:

			/* Add pd to the linked list of providers tried. */
			if (IS_RECOVERABLE(error)) {
				if (kcf_insert_triedlist(&list, pd,
				    KCF_KMFLAG(crq)) != NULL)
					goto retry;
			}
		}
		/* next_req is owned by the callback only while queued. */
		if (error != CRYPTO_QUEUED && next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		if (list != NULL)
			kcf_free_triedlist(list);
		KCF_PROV_REFRELE(pd);
		return (error);
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
	}

	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
	    IS_RECOVERABLE(error)) {
		/* Add pd to the linked list of providers tried. */
		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
			goto retry;
	}

	if (next_req != NULL)
		kmem_free(next_req, sizeof (kcf_dual_req_t));

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
416 
/*
 * Starts a multi-part dual encrypt/mac operation on the provider specified
 * by the caller, in session 'sid'.  On success the new context is returned
 * through 'ctxp' for use by the subsequent update/final calls.
 */
int
crypto_encrypt_mac_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_ctx_t *ctx;
	kcf_context_t *encr_kcf_context = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * A logical provider cannot perform the operation itself; resolve
	 * it to a hardware member that supports both mechanisms.  On
	 * success real_provider is returned held and is released at 'out'.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type,
		    mac_mech->cm_type, CRYPTO_OPS_OFFSET(dual_cipher_mac_ops),
		    CRYPTO_CIPHER_MAC_OFFSET(encrypt_mac_init),
		    CHECK_RESTRICT(cr), pd, &real_provider);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			/* A stale template means the SW provider changed. */
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			/* A stale template means the SW provider changed. */
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* Allocate the framework context for this multi-part operation. */
	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	/* On hard failure, release the context; otherwise hand it back. */
	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	/* Drop the hold obtained by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
550 
/*
 * Starts a multi-part dual encrypt/mac operation. The provider and session
 * to use are determined by the KCF dispatcher.
 */
/* ARGSUSED */
int
crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;	/* providers already tried */
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	crypto_ctx_t *ctx = NULL;
	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
	crypto_call_flag_t save_flag;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC,
	    CHECK_RESTRICT(cr), 0);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}
		encr_tmpl_checked = B_TRUE;
	}

	/*
	 * The chosen provider cannot do the MAC mechanism: emulate the
	 * dual init with separate encrypt and MAC contexts.
	 */
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/*
		 * We avoid code complexity by limiting the pure async.
		 * case to be done using only a SW provider.
		 * XXX - Redo the emulation code below so that we can
		 * remove this limitation.
		 */
		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
			/* Try another provider before giving up. */
			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
			    != NULL))
				goto retry;
			if (list != NULL)
				kcf_free_triedlist(list);
			if (encr_kcf_context != NULL)
				KCF_CONTEXT_REFRELE(encr_kcf_context);
			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}

		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
			if (ctx == NULL) {
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			encr_kcf_context = (kcf_context_t *)
			    ctx->cc_framework_private;
		}
		/*
		 * Trade-off speed vs avoidance of code complexity and
		 * duplication:
		 * Could do all the combinations of fastpath / synch / asynch
		 * for the encryption and the mac steps. Early attempts
		 * showed the code grew wild and bug-prone, for little gain.
		 * Therefore, the adaptive asynch case is not implemented.
		 * It's either pure synchronous, or pure asynchronous.
		 * We still preserve a fastpath for the pure synchronous
		 * requests to SW providers.
		 */
		if (cr == NULL) {
			crypto_context_t mac_context;

			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
				crypto_mechanism_t lmech = *encr_mech;

				lmech.cm_type = prov_encr_mechid;

				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
				    encr_key, spi_encr_tmpl,
				    KCF_RHNDL(KM_SLEEP));
			} else {
				/*
				 * If we did the 'goto retry' then ctx may not
				 * be NULL.  In general, we can't reuse another
				 * provider's context, so we free it now so
				 * we don't leak it.
				 */
				if (ctx != NULL) {
					KCF_CONTEXT_REFRELE((kcf_context_t *)
					    ctx->cc_framework_private);
					encr_kcf_context = NULL;
				}
				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
				    encr_mech, encr_key, &encr_tmpl,
				    (crypto_context_t *)&ctx, NULL);

				if (error == CRYPTO_SUCCESS) {
					encr_kcf_context = (kcf_context_t *)
					    ctx->cc_framework_private;
				}
			}
			KCF_PROV_INCRSTATS(pd, error);

			KCF_PROV_REFRELE(pd);

			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED. return the failure */
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				return (error);
			}
			/* Let the dispatcher pick the MAC provider. */
			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
			    &mac_context, NULL);

			if (list != NULL)
				kcf_free_triedlist(list);

			if (error != CRYPTO_SUCCESS) {
				/* Should this be an ASSERT() ? */

				KCF_CONTEXT_REFRELE(encr_kcf_context);
			} else {
				/*
				 * Link the MAC context to the encrypt
				 * context; the update/final entry points
				 * follow kc_secondctx for the MAC step.
				 */
				encr_kcf_context = (kcf_context_t *)
				    ctx->cc_framework_private;
				mac_kcf_context = (kcf_context_t *)
				    ((crypto_ctx_t *)mac_context)->
				    cc_framework_private;

				encr_kcf_context->kc_secondctx =
				    mac_kcf_context;
				KCF_CONTEXT_REFHOLD(mac_kcf_context);

				*ctxp = (crypto_context_t)ctx;
			}

			return (error);
		}

		/* submit a pure asynchronous request. */
		save_flag = cr->cr_flag;
		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		/*
		 * cmops->em_encr_mech.cm_type will be set when we get to
		 * kcf_emulate_dual() routine.
		 */
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;

		/*
		 * cmops->em_mac_mech.cm_type will be set when we know the
		 * MAC provider.
		 */
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		/*
		 * non-NULL ctx->kc_secondctx tells common_submit_request
		 * that this request uses separate cipher and MAC contexts.
		 * That function will set ctx->kc_secondctx to the new
		 * MAC context, once it gets one.
		 */
		encr_kcf_context->kc_secondctx = encr_kcf_context;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);

		cr->cr_flag = save_flag;

		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
			KCF_CONTEXT_REFRELE(encr_kcf_context);
		}
		if (list != NULL)
			kcf_free_triedlist(list);
		*ctxp = (crypto_context_t)ctx;
		KCF_PROV_REFRELE(pd);
		return (error);
	}

	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}

	if (ctx == NULL) {
		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
		if (ctx == NULL) {
			if (list != NULL)
				kcf_free_triedlist(list);

			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}
		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
		/* Retry on another provider when the error is recoverable. */
		if ((IS_RECOVERABLE(error)) &&
		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
			goto retry;

		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
866 
867 /*
868  * Continues a multi-part dual encrypt/mac operation.
869  */
870 /* ARGSUSED */
871 int
872 crypto_encrypt_mac_update(crypto_context_t context,
873     crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
874 {
875 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
876 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
877 	kcf_provider_desc_t *pd;
878 	int error;
879 	kcf_req_params_t params;
880 
881 	if ((ctx == NULL) ||
882 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
883 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
884 		return (CRYPTO_INVALID_CONTEXT);
885 	}
886 
887 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
888 	KCF_PROV_REFHOLD(pd);
889 
890 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
891 		off_t save_offset;
892 		size_t save_len;
893 		crypto_call_flag_t save_flag;
894 
895 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
896 			error = CRYPTO_INVALID_CONTEXT;
897 			goto out;
898 		}
899 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
900 
901 		/* First we submit the encryption request */
902 		if (cr == NULL) {
903 			/*
904 			 * 'ct' is always not NULL.
905 			 * A NULL 'pt' means in-place.
906 			 */
907 			if (pt == NULL)
908 				error = crypto_encrypt_update(context,
909 				    (crypto_data_t *)ct, NULL, NULL);
910 			else
911 				error = crypto_encrypt_update(context, pt,
912 				    (crypto_data_t *)ct, NULL);
913 
914 			if (error != CRYPTO_SUCCESS)
915 				goto out;
916 
917 			/*
918 			 * call  mac_update when there is data to throw in
919 			 * the mix. Either an explicitly non-zero ct->dd_len2,
920 			 * or the last ciphertext portion.
921 			 */
922 			save_offset = ct->dd_offset1;
923 			save_len = ct->dd_len1;
924 			if (ct->dd_len2 == 0) {
925 				/*
926 				 * The previous encrypt step was an
927 				 * accumulation only and didn't produce any
928 				 * partial output
929 				 */
930 				if (ct->dd_len1 == 0)
931 					goto out;
932 			} else {
933 				ct->dd_offset1 = ct->dd_offset2;
934 				ct->dd_len1 = ct->dd_len2;
935 			}
936 			error = crypto_mac_update((crypto_context_t)mac_ctx,
937 			    (crypto_data_t *)ct, NULL);
938 
939 			ct->dd_offset1 = save_offset;
940 			ct->dd_len1 = save_len;
941 
942 			goto out;
943 		}
944 		/* submit a pure asynchronous request. */
945 		save_flag = cr->cr_flag;
946 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
947 
948 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
949 		    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL)
950 
951 
952 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
953 
954 		cr->cr_flag = save_flag;
955 		goto out;
956 	}
957 
958 	/* The fast path for SW providers. */
959 	if (CHECK_FASTPATH(cr, pd)) {
960 		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
961 		KCF_PROV_INCRSTATS(pd, error);
962 	} else {
963 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
964 		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);
965 
966 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
967 	}
968 out:
969 	KCF_PROV_REFRELE(pd);
970 	return (error);
971 }
972 
973 /*
974  * Terminates a multi-part dual encrypt/mac operation.
975  */
976 /* ARGSUSED */
int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
    crypto_data_t *mac, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	int error;
	kcf_req_params_t params;

	/* A valid framework context with a live provider is required. */
	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	KCF_PROV_REFHOLD(pd);

	/*
	 * A non-NULL kc_secondctx means the dual operation is carried by
	 * two separate contexts: this one for the cipher step and
	 * kcf_mac_ctx for the MAC step.
	 */
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		off_t save_offset;
		size_t save_len;
		crypto_context_t mac_context;
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			/* The MAC step's provider went away. */
			KCF_PROV_REFRELE(pd);
			return (CRYPTO_INVALID_CONTEXT);
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
		mac_context = (crypto_context_t)mac_ctx;

		/*
		 * Synchronous case: finish the encryption, feed the last
		 * ciphertext chunk to the MAC, then collect the MAC.
		 */
		if (cr == NULL) {
			/* Get the last chunk of ciphertext */
			error = crypto_encrypt_final(context,
			    (crypto_data_t *)ct, NULL);

			KCF_PROV_REFRELE(pd);

			if (error != CRYPTO_SUCCESS)  {
				/*
				 * Needed here, because the caller of
				 * crypto_encrypt_mac_final() lost all
				 * refs to the mac_ctx.
				 */
				crypto_cancel_ctx(mac_context);
				return (error);
			}
			if (ct->dd_len2 > 0) {
				/*
				 * Temporarily point the primary span at the
				 * secondary (ciphertext) region so the MAC
				 * update consumes the final chunk; restore
				 * the caller's offsets afterwards.
				 */
				save_offset = ct->dd_offset1;
				save_len = ct->dd_len1;
				ct->dd_offset1 = ct->dd_offset2;
				ct->dd_len1 = ct->dd_len2;

				error = crypto_mac_update(mac_context,
				    (crypto_data_t *)ct, NULL);

				ct->dd_offset1 = save_offset;
				ct->dd_len1 = save_len;

				if (error != CRYPTO_SUCCESS)  {
					crypto_cancel_ctx(mac_context);
					return (error);
				}
			}

			/* and finally, collect the MAC */
			error = crypto_mac_final(mac_context, mac, NULL);

			return (error);
		}
		/* submit a pure asynchronous request. */
		save_flag = cr->cr_flag;
		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
		    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL)


		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);

		cr->cr_flag = save_flag;
		KCF_PROV_REFRELE(pd);
		return (error);
	}
	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);
		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}
out:
	KCF_PROV_REFRELE(pd);
	/* Release the hold done in kcf_new_ctx() during init step. */
	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
	return (error);
}
1076 
1077 /*
1078  * Performs an atomic dual mac/decrypt operation. The provider to use
1079  * is determined by the KCF dispatcher.
1080  */
1081 int
1082 crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
1083     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1084     crypto_key_t *mac_key, crypto_key_t *decr_key,
1085     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1086     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1087 {
1088 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1089 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
1090 }
1091 
1092 int
1093 crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
1094     crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
1095     crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
1096     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1097     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1098 {
1099 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1100 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1101 	    crq, B_FALSE));
1102 }
1103 
1104 /*
1105  * Performs an atomic dual mac/decrypt operation. The provider to use
1106  * is determined by the KCF dispatcher. 'mac' specifies the expected
1107  * value for the MAC. The decryption is not performed if the computed
1108  * MAC does not match the expected MAC.
1109  */
1110 int
1111 crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
1112     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1113     crypto_key_t *mac_key, crypto_key_t *decr_key,
1114     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1115     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1116 {
1117 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1118 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
1119 }
1120 
1121 int
1122 crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
1123     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1124     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1125     crypto_key_t *mac_key, crypto_key_t *decr_key,
1126     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1127     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1128 {
1129 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1130 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1131 	    crq, B_TRUE));
1132 }
1133 
/*
 * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
 * When do_verify is set, the computed MAC is first checked against the
 * expected MAC, and the decryption step runs only if they match.
 */
static int
crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
    boolean_t do_verify)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;
	boolean_t decr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	kcf_dual_req_t *next_req = NULL;
	crypto_call_req_t mac_req, *mac_reqp = NULL;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
	    CHECK_RESTRICT(crq), ct->dd_len2);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		if (next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		return (CRYPTO_MECH_NOT_SUPPORTED);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 */

	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (decr_tmpl != NULL) {
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);
				KCF_PROV_REFRELE(pd);

				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}
		decr_tmpl_checked = B_TRUE;
	}
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/* Prepare the call_req to be submitted for the MAC step */

		if (crq != NULL) {

			if (next_req == NULL) {
				/*
				 * allocate, initialize and prepare the
				 * params for the next step only in the
				 * first pass (not on every retry).
				 */
				next_req = kcf_alloc_req(crq);

				if (next_req == NULL) {
					KCF_PROV_REFRELE(pd);
					if (list != NULL)
						kcf_free_triedlist(list);
					return (CRYPTO_HOST_MEMORY);
				}
				KCF_WRAP_DECRYPT_OPS_PARAMS(
				    &(next_req->kr_params), KCF_OP_ATOMIC,
				    NULL, decr_mech, decr_key,
				    (crypto_data_t *)ct, pt, spi_decr_tmpl);
			}

			/*
			 * The MAC step's completion callback (kcf_next_req)
			 * chains into the decrypt step carried by next_req.
			 */
			mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
			mac_req.cr_flag |= CRYPTO_SETDUAL;
			mac_req.cr_callback_func = kcf_next_req;
			mac_req.cr_callback_arg = next_req;
			mac_reqp = &mac_req;
		}

		/* 'pd' is the decryption provider. */

		if (do_verify)
			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac,
			    (crq == NULL) ? NULL : mac_reqp);
		else
			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac,
			    (crq == NULL) ? NULL : mac_reqp);

		switch (error) {
		case CRYPTO_SUCCESS: {
			off_t saveoffset;
			size_t savelen;

			/*
			 * MAC step completed synchronously; now run the
			 * decrypt step ourselves. Remember the primary
			 * offset/length so they can be restored below.
			 */
			if (next_req == NULL) {
				saveoffset = ct->dd_offset1;
				savelen = ct->dd_len1;
			} else {
				saveoffset = next_req->kr_saveoffset =
				    ct->dd_offset1;
				savelen = next_req->kr_savelen = ct->dd_len1;

				ASSERT(mac_reqp != NULL);
				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
				mac_req.cr_callback_func = kcf_last_req;
			}
			/* Swap in the secondary span for the decrypt step. */
			ct->dd_offset1 = ct->dd_offset2;
			ct->dd_len1 = ct->dd_len2;

			if (CHECK_FASTPATH(crq, pd)) {
				crypto_mechanism_t lmech;

				lmech = *decr_mech;
				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
				    pd, &lmech);

				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
				    &lmech, decr_key, (crypto_data_t *)ct,
				    (crypto_data_t *)pt, spi_decr_tmpl,
				    KCF_SWFP_RHNDL(mac_reqp));

				KCF_PROV_INCRSTATS(pd, error);
			} else {
				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
				    decr_key, (crypto_data_t *)ct, pt,
				    spi_decr_tmpl);

				error = kcf_submit_request(pd, NULL,
				    (crq == NULL) ? NULL : mac_reqp,
				    &params, B_FALSE);
			}
			if (error != CRYPTO_QUEUED) {
				KCF_PROV_INCRSTATS(pd, error);
				ct->dd_offset1 = saveoffset;
				ct->dd_len1 = savelen;
			}
			break;
		}

		case CRYPTO_QUEUED:
			/*
			 * Propagate the MAC step's request ID to the caller
			 * when the caller's own ID assignment was skipped.
			 */
			if ((crq != NULL) && (crq->cr_flag & CRYPTO_SKIP_REQID))
				crq->cr_reqid = mac_req.cr_reqid;
			break;

		default:
			if (IS_RECOVERABLE(error)) {
				if (kcf_insert_triedlist(&list, pd,
				    KCF_KMFLAG(crq)) != NULL)
					goto retry;
			}
		}
		if (error != CRYPTO_QUEUED && next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		if (list != NULL)
			kcf_free_triedlist(list);
		KCF_PROV_REFRELE(pd);
		return (error);
	}

	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);
				KCF_PROV_REFRELE(pd);

				/* Which one is the old one ? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lmac_mech;
		crypto_mechanism_t ldecr_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		ldecr_mech.cm_type = prov_decr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		if (do_verify)
			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));
		else
			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
		    spi_mac_tmpl, spi_decr_tmpl);

		cmops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		cmops->md_decr_mech = *decr_mech;
		cmops->md_decr_mech.cm_type = prov_decr_mechid;
		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
		cmops->md_mac_mech = *mac_mech;
		cmops->md_mac_mech.cm_type = prov_mac_mechid;
		cmops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
	}

	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
	    IS_RECOVERABLE(error)) {
		/* Add pd to the linked list of providers tried. */
		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
			goto retry;
	}

	if (list != NULL)
		kcf_free_triedlist(list);

	if (next_req != NULL)
		kmem_free(next_req, sizeof (kcf_dual_req_t));
	KCF_PROV_REFRELE(pd);
	return (error);
}
1399 
/*
 * Caller-specified-provider flavor of the dual mac/decrypt atomic
 * operation; called by crypto_mac_decrypt_prov() and
 * crypto_mac_verify_decrypt_prov(). The provider reference is held by
 * the caller (asserted below).
 */
static int
crypto_mac_decrypt_common_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
    boolean_t do_verify)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	int error;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * A logical provider is resolved to a hardware member capable of
	 * the requested dual entry point; real_provider is returned held.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		if (do_verify) {
			error = kcf_get_hardware_provider(decr_mech->cm_type,
			    mac_mech->cm_type,
			    CRYPTO_OPS_OFFSET(dual_cipher_mac_ops),
			    CRYPTO_CIPHER_MAC_OFFSET(mac_verify_decrypt_atomic),
			    CHECK_RESTRICT(crq), pd, &real_provider);
		} else {
			error = kcf_get_hardware_provider(decr_mech->cm_type,
			    mac_mech->cm_type,
			    CRYPTO_OPS_OFFSET(dual_cipher_mac_ops),
			    CRYPTO_CIPHER_MAC_OFFSET(mac_decrypt_atomic),
			    CHECK_RESTRICT(crq), pd, &real_provider);
		}

		if (error != CRYPTO_SUCCESS)
			return (error);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lmac_mech;
		crypto_mechanism_t ldecr_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		if (do_verify)
			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
			    real_provider, sid, &lmac_mech, mac_key,
			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
		else
			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
		    spi_mac_tmpl, spi_decr_tmpl);

		cmops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		cmops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &cmops->md_decr_mech);
		cmops->md_framework_decr_mechtype = decr_mech->cm_type;

		cmops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->md_mac_mech);
		cmops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	/* Drop the hold acquired by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (error);
}
1536 
1537 /*
1538  * Starts a multi-part dual mac/decrypt operation. The provider to
1539  * use is determined by the KCF dispatcher.
1540  */
1541 /* ARGSUSED */
1542 int
1543 crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
1544     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1545     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1546     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1547     crypto_call_req_t *cr)
1548 {
1549 	/*
1550 	 * First try to find a provider for the decryption mechanism, that
1551 	 * is also capable of the MAC mechanism.
1552 	 * We still favor optimizing the costlier decryption.
1553 	 */
1554 	int error;
1555 	kcf_mech_entry_t *me;
1556 	kcf_provider_desc_t *pd;
1557 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1558 	kcf_req_params_t params;
1559 	kcf_mac_decrypt_ops_params_t *mdops;
1560 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1561 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1562 	kcf_prov_tried_t *list = NULL;
1563 	boolean_t decr_tmpl_checked = B_FALSE;
1564 	boolean_t mac_tmpl_checked = B_FALSE;
1565 	crypto_ctx_t *ctx = NULL;
1566 	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
1567 	crypto_call_flag_t save_flag;
1568 
1569 retry:
1570 	/* pd is returned held on success */
1571 	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
1572 	    &prov_mac_mechid, &error, list,
1573 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC,
1574 	    CHECK_RESTRICT(cr), 0);
1575 	if (pd == NULL) {
1576 		if (list != NULL)
1577 			kcf_free_triedlist(list);
1578 		return (error);
1579 	}
1580 
1581 	/*
1582 	 * For SW providers, check the validity of the context template
1583 	 * It is very rare that the generation number mis-matches, so
1584 	 * is acceptable to fail here, and let the consumer recover by
1585 	 * freeing this tmpl and create a new one for the key and new SW
1586 	 * provider
1587 	 * Warning! will need to change when multiple software providers
1588 	 * per mechanism are supported.
1589 	 */
1590 
1591 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1592 		if (decr_tmpl != NULL) {
1593 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1594 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1595 
1596 				if (list != NULL)
1597 					kcf_free_triedlist(list);
1598 				if (decr_kcf_context != NULL)
1599 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1600 
1601 				KCF_PROV_REFRELE(pd);
1602 				/* Which one is the the old one ? */
1603 				return (CRYPTO_OLD_CTX_TEMPLATE);
1604 			}
1605 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1606 		}
1607 		decr_tmpl_checked = B_TRUE;
1608 	}
1609 
1610 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1611 		/* Need to emulate with 2 internal calls */
1612 
1613 		/*
1614 		 * We avoid code complexity by limiting the pure async.
1615 		 * case to be done using only a SW provider.
1616 		 * XXX - Redo the emulation code below so that we can
1617 		 * remove this limitation.
1618 		 */
1619 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
1620 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
1621 			    != NULL))
1622 				goto retry;
1623 			if (list != NULL)
1624 				kcf_free_triedlist(list);
1625 			if (decr_kcf_context != NULL)
1626 				KCF_CONTEXT_REFRELE(decr_kcf_context);
1627 			KCF_PROV_REFRELE(pd);
1628 			return (CRYPTO_HOST_MEMORY);
1629 		}
1630 
1631 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1632 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1633 			if (ctx == NULL) {
1634 				if (list != NULL)
1635 					kcf_free_triedlist(list);
1636 				if (decr_kcf_context != NULL)
1637 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1638 				KCF_PROV_REFRELE(pd);
1639 				return (CRYPTO_HOST_MEMORY);
1640 			}
1641 			decr_kcf_context = (kcf_context_t *)
1642 			    ctx->cc_framework_private;
1643 		}
1644 		/*
1645 		 * Trade-off speed vs avoidance of code complexity and
1646 		 * duplication:
1647 		 * Could do all the combinations of fastpath / synch / asynch
1648 		 * for the decryption and the mac steps. Early attempts
1649 		 * showed the code grew wild and bug-prone, for little gain.
1650 		 * Therefore, the adaptative asynch case is not implemented.
1651 		 * It's either pure synchronous, or pure asynchronous.
1652 		 * We still preserve a fastpath for the pure synchronous
1653 		 * requests to SW providers.
1654 		 */
1655 		if (cr == NULL) {
1656 			crypto_context_t mac_context;
1657 
1658 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
1659 			    &mac_context, NULL);
1660 
1661 			if (error != CRYPTO_SUCCESS) {
1662 				/* Can't be CRYPTO_QUEUED. return the failure */
1663 				if (list != NULL)
1664 					kcf_free_triedlist(list);
1665 
1666 				if (decr_kcf_context != NULL)
1667 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1668 				return (error);
1669 			}
1670 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1671 				crypto_mechanism_t lmech = *decr_mech;
1672 
1673 				lmech.cm_type = prov_decr_mechid;
1674 
1675 				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
1676 				    decr_key, spi_decr_tmpl,
1677 				    KCF_RHNDL(KM_SLEEP));
1678 			} else {
1679 				/*
1680 				 * If we did the 'goto retry' then ctx may not
1681 				 * be NULL.  In general, we can't reuse another
1682 				 * provider's context, so we free it now so
1683 				 * we don't leak it.
1684 				 */
1685 				if (ctx != NULL) {
1686 					KCF_CONTEXT_REFRELE((kcf_context_t *)
1687 					    ctx->cc_framework_private);
1688 					decr_kcf_context = NULL;
1689 				}
1690 				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
1691 				    decr_mech, decr_key, &decr_tmpl,
1692 				    (crypto_context_t *)&ctx, NULL);
1693 
1694 				if (error == CRYPTO_SUCCESS) {
1695 					decr_kcf_context = (kcf_context_t *)
1696 					    ctx->cc_framework_private;
1697 				}
1698 			}
1699 
1700 			KCF_PROV_INCRSTATS(pd, error);
1701 
1702 			KCF_PROV_REFRELE(pd);
1703 
1704 			if (error != CRYPTO_SUCCESS) {
1705 				/* Can't be CRYPTO_QUEUED. return the failure */
1706 				if (list != NULL)
1707 					kcf_free_triedlist(list);
1708 				if (mac_kcf_context != NULL)
1709 					KCF_CONTEXT_REFRELE(mac_kcf_context);
1710 
1711 				return (error);
1712 			}
1713 			mac_kcf_context = (kcf_context_t *)
1714 			    ((crypto_ctx_t *)mac_context)->
1715 			    cc_framework_private;
1716 
1717 			decr_kcf_context = (kcf_context_t *)
1718 			    ctx->cc_framework_private;
1719 
1720 			/*
1721 			 * Here also, the mac context is second. The callback
1722 			 * case can't overwrite the context returned to
1723 			 * the caller.
1724 			 */
1725 			decr_kcf_context->kc_secondctx = mac_kcf_context;
1726 			KCF_CONTEXT_REFHOLD(mac_kcf_context);
1727 
1728 			*ctxp = (crypto_context_t)ctx;
1729 
1730 			return (error);
1731 		}
1732 		/* submit a pure asynchronous request. */
1733 		save_flag = cr->cr_flag;
1734 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1735 
1736 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1737 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1738 		    spi_mac_tmpl, spi_decr_tmpl);
1739 
1740 		mdops = &(params.rp_u.mac_decrypt_params);
1741 
1742 		/* careful! structs assignments */
1743 		mdops->md_decr_mech = *decr_mech;
1744 		/*
1745 		 * mdops->md_decr_mech.cm_type will be set when we get to
1746 		 * kcf_emulate_dual() routine.
1747 		 */
1748 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1749 		mdops->md_mac_mech = *mac_mech;
1750 
1751 		/*
1752 		 * mdops->md_mac_mech.cm_type will be set when we know the
1753 		 * MAC provider.
1754 		 */
1755 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1756 
1757 		/*
1758 		 * non-NULL ctx->kc_secondctx tells common_submit_request
1759 		 * that this request uses separate cipher and MAC contexts.
1760 		 * That function will set the MAC context's kc_secondctx to
1761 		 * this decrypt context.
1762 		 */
1763 		decr_kcf_context->kc_secondctx = decr_kcf_context;
1764 
1765 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1766 
1767 		cr->cr_flag = save_flag;
1768 
1769 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1770 			KCF_CONTEXT_REFRELE(decr_kcf_context);
1771 		}
1772 		if (list != NULL)
1773 			kcf_free_triedlist(list);
1774 		*ctxp =  ctx;
1775 		KCF_PROV_REFRELE(pd);
1776 		return (error);
1777 	}
1778 
1779 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1780 		if ((mac_tmpl != NULL) &&
1781 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1782 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1783 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1784 
1785 				if (list != NULL)
1786 					kcf_free_triedlist(list);
1787 
1788 				KCF_PROV_REFRELE(pd);
1789 				/* Which one is the the old one ? */
1790 				return (CRYPTO_OLD_CTX_TEMPLATE);
1791 			}
1792 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1793 		}
1794 		mac_tmpl_checked = B_TRUE;
1795 	}
1796 
1797 	if (ctx == NULL) {
1798 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1799 		if (ctx == NULL) {
1800 			error = CRYPTO_HOST_MEMORY;
1801 			if (list != NULL)
1802 				kcf_free_triedlist(list);
1803 			return (CRYPTO_HOST_MEMORY);
1804 		}
1805 		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1806 	}
1807 
1808 	/* The fast path for SW providers. */
1809 	if (CHECK_FASTPATH(cr, pd)) {
1810 		crypto_mechanism_t ldecr_mech;
1811 		crypto_mechanism_t lmac_mech;
1812 
1813 		/* careful! structs assignments */
1814 		ldecr_mech = *decr_mech;
1815 		ldecr_mech.cm_type = prov_decr_mechid;
1816 		lmac_mech = *mac_mech;
1817 		lmac_mech.cm_type = prov_mac_mechid;
1818 
1819 		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
1820 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1821 		    KCF_SWFP_RHNDL(cr));
1822 
1823 		KCF_PROV_INCRSTATS(pd, error);
1824 	} else {
1825 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1826 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1827 		    spi_mac_tmpl, spi_decr_tmpl);
1828 
1829 		mdops = &(params.rp_u.mac_decrypt_params);
1830 
1831 		/* careful! structs assignments */
1832 		mdops->md_decr_mech = *decr_mech;
1833 		mdops->md_decr_mech.cm_type = prov_decr_mechid;
1834 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1835 		mdops->md_mac_mech = *mac_mech;
1836 		mdops->md_mac_mech.cm_type = prov_mac_mechid;
1837 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1838 
1839 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1840 	}
1841 
1842 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1843 		if ((IS_RECOVERABLE(error)) &&
1844 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
1845 			goto retry;
1846 
1847 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1848 	} else
1849 		*ctxp = (crypto_context_t)ctx;
1850 
1851 	if (list != NULL)
1852 		kcf_free_triedlist(list);
1853 
1854 	KCF_PROV_REFRELE(pd);
1855 	return (error);
1856 }
1857 
/*
 * Starts a dual MAC/decrypt session on an explicitly specified provider.
 * Unlike crypto_mac_decrypt_init(), no provider search/retry is done: the
 * caller has already chosen 'provider' (which must be refheld).  A logical
 * provider is resolved to a real hardware member first.  On success (or
 * CRYPTO_QUEUED) a context handle is returned through 'ctxp'; it must later
 * be driven through the _update()/_final() entry points.
 */
int
crypto_mac_decrypt_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
    crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	int rv;
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *mdops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_ctx_t *ctx;
	kcf_context_t *decr_kcf_context = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	/*
	 * A logical provider cannot execute the operation itself; resolve
	 * it to a hardware member capable of the dual mac_decrypt_init
	 * entry point.  On success, real_provider comes back held and is
	 * released at 'out' below.
	 */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(decr_mech->cm_type,
		    mac_mech->cm_type, CRYPTO_OPS_OFFSET(dual_cipher_mac_ops),
		    CRYPTO_CIPHER_MAC_OFFSET(mac_decrypt_init),
		    CHECK_RESTRICT(cr), pd, &real_provider);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}

	/*
	 * For SW providers, check the validity of the context template
	 * It is very rare that the generation number mis-matches, so
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and create a new one for the key and new SW
	 * provider
	 * Warning! will need to change when multiple software providers
	 * per mechanism are supported.
	 */

	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			/* Stale template: SW provider was replaced. */
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			/* Stale template: SW provider was replaced. */
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/*
	 * Allocate the framework context; it holds a reference on
	 * real_provider for the lifetime of the operation.
	 */
	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t ldecr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
		    KCF_SWFP_RHNDL(cr));

		/*
		 * NOTE(review): stats are charged to 'pd' although the
		 * operation ran on 'real_provider' — confirm this is
		 * intended when pd is a logical provider.
		 */
		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		/* Queue the request through the KCF scheduler. */
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, mac_key, decr_key, NULL, NULL, NULL,
		    spi_mac_tmpl, spi_decr_tmpl);

		mdops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		mdops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &mdops->md_decr_mech);
		mdops->md_framework_decr_mechtype = decr_mech->cm_type;

		mdops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &mdops->md_mac_mech);
		mdops->md_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	/* On hard failure, drop the context; otherwise hand it back. */
	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(decr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	/* Release the hold taken by kcf_get_hardware_provider() above. */
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);
	return (rv);
}
/*
 * Continues a multi-part dual mac/decrypt operation.
 *
 * 'context' is the handle produced by a mac_decrypt init routine, 'ct'
 * carries the ciphertext chunk (dual data: region 1 feeds the MAC, an
 * optional region 2 feeds the decryption), and 'pt' receives plaintext.
 * Returns CRYPTO_SUCCESS, CRYPTO_QUEUED (async), or an error code.
 */
/* ARGSUSED */
int
crypto_mac_decrypt_update(crypto_context_t context,
    crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	int error;
	kcf_req_params_t params;

	/* Validate the opaque handle before touching anything else. */
	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	KCF_PROV_REFHOLD(pd);

	/*
	 * A non-NULL secondary context means the dual operation is being
	 * emulated with two single operations: MAC on the secondary
	 * context, decryption on the primary one.
	 */
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		off_t save_offset;
		size_t save_len;
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			error = CRYPTO_INVALID_CONTEXT;
			goto out;
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;

		/* First we submit the MAC request */
		if (cr == NULL) {
			/*
			 * 'ct' is always not NULL.
			 */
			error = crypto_mac_update((crypto_context_t)mac_ctx,
			    (crypto_data_t *)ct, NULL);

			if (error != CRYPTO_SUCCESS)
				goto out;

			/* Decrypt a different length only when told so */

			/*
			 * The MAC consumed region 1 of the dual data.  If
			 * the caller set up a second region (dd_len2 > 0),
			 * the decryption consumes that region instead;
			 * region 1 is restored afterwards so the caller's
			 * view of 'ct' is unchanged.
			 */
			save_offset = ct->dd_offset1;
			save_len = ct->dd_len1;

			if (ct->dd_len2 > 0) {
				ct->dd_offset1 = ct->dd_offset2;
				ct->dd_len1 = ct->dd_len2;
			}

			error = crypto_decrypt_update(context,
			    (crypto_data_t *)ct, pt, NULL);

			ct->dd_offset1 = save_offset;
			ct->dd_len1 = save_len;

			goto out;
		}
		/* submit a pure asynchronous request. */
		save_flag = cr->cr_flag;
		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

		/*
		 * NOTE(review): no trailing ';' after this macro call —
		 * presumably it expands to a complete statement; the other
		 * call sites use one.  Confirm against the macro definition.
		 */
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
		    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL)


		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);

		/* Restore the caller's original scheduling flags. */
		cr->cr_flag = save_flag;
		goto out;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}
out:
	KCF_PROV_REFRELE(pd);
	return (error);
}
2083 
/*
 * Terminates a multi-part dual mac/decrypt operation.
 *
 * Collects the final MAC into 'mac' and the last chunk of plaintext into
 * 'pt'.  Returns CRYPTO_SUCCESS, CRYPTO_QUEUED (async), or an error code.
 * On the synchronous non-emulated path the framework context is released
 * via KCF_CONTEXT_COND_RELEASE at the bottom.
 */
/* ARGSUSED */
int
crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
    crypto_data_t *pt, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	int error;
	kcf_req_params_t params;

	/* Validate the opaque handle before touching anything else. */
	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	KCF_PROV_REFHOLD(pd);

	/*
	 * Emulated dual operation: finish the MAC on the secondary
	 * context, then the decryption on the primary one.
	 */
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			error = CRYPTO_INVALID_CONTEXT;
			goto out;
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;

		/* First we collect the MAC */
		if (cr == NULL) {

			error = crypto_mac_final((crypto_context_t)mac_ctx,
			    mac, NULL);

			if (error != CRYPTO_SUCCESS) {
				crypto_cancel_ctx(ctx);
			} else {
				/* Get the last chunk of plaintext */
				error = crypto_decrypt_final(context, pt, NULL);
			}

			/*
			 * NOTE(review): this path returns without the
			 * KCF_CONTEXT_COND_RELEASE done below; the context
			 * appears to be torn down by crypto_cancel_ctx() /
			 * crypto_decrypt_final() instead — confirm.
			 */
			KCF_PROV_REFRELE(pd);
			return (error);
		}
		/* submit a pure asynchronous request. */
		save_flag = cr->cr_flag;
		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
		    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL)


		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);

		/* Restore the caller's original scheduling flags. */
		cr->cr_flag = save_flag;

		KCF_PROV_REFRELE(pd);
		return (error);
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}
out:
	KCF_PROV_REFRELE(pd);
	/* Release the hold done in kcf_new_ctx() during init step. */
	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
	return (error);
}
2164 
/*
 * Digest/Encrypt dual operation. Project-private entry point, not part of
 * the k-API.
 */
/* ARGSUSED */
int
crypto_digest_encrypt_update(crypto_context_t digest_ctx,
    crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_call_req_t *crq)
{
	/*
	 * Unimplemented stub: always returns CRYPTO_NOT_SUPPORTED.
	 *
	 * RFE 4688647:
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}
2181 
2182 /*
2183  * Decrypt/Digest dual operation. Project-private entry point, not part of
2184  * the k-API.
2185  */
2186 /* ARGSUSED */
2187 int
2188 crypto_decrypt_digest_update(crypto_context_t decryptctx,
2189     crypto_context_t encrypt_ctx, crypto_data_t *ciphertext,
2190     crypto_data_t *plaintext, crypto_call_req_t *crq)
2191 {
2192 	/*
2193 	 * RFE 4688647:
2194 	 * core functions needed by ioctl interface missing from impl.h
2195 	 */
2196 	return (CRYPTO_NOT_SUPPORTED);
2197 }
2198 
/*
 * Sign/Encrypt dual operation. Project-private entry point, not part of
 * the k-API.
 */
/* ARGSUSED */
int
crypto_sign_encrypt_update(crypto_context_t sign_ctx,
    crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_call_req_t *crq)
{
	/*
	 * Unimplemented stub: always returns CRYPTO_NOT_SUPPORTED.
	 *
	 * RFE 4688647:
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}
2215 
/*
 * Decrypt/Verify dual operation. Project-private entry point, not part of
 * the k-API.
 */
/* ARGSUSED */
int
crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
    crypto_context_t verify_ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_call_req_t *crq)
{
	/*
	 * Unimplemented stub: always returns CRYPTO_NOT_SUPPORTED.
	 *
	 * RFE 4688647:
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}
2232