xref: /titanic_44/usr/src/uts/common/crypto/api/kcf_dual.c (revision bb25c06cca41ca78e5fb87fbb8e81d55beb18c95)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 #include <sys/errno.h>
29 #include <sys/types.h>
30 #include <sys/kmem.h>
31 #include <sys/sysmacros.h>
32 #include <sys/crypto/common.h>
33 #include <sys/crypto/impl.h>
34 #include <sys/crypto/api.h>
35 #include <sys/crypto/spi.h>
36 #include <sys/crypto/sched_impl.h>
37 
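/*
 * Consumer entry points for the dual cipher/MAC operations
 * (encrypt+mac and mac+decrypt). The crypto_*() variants let the KCF
 * dispatcher pick a provider; when the provider chosen for the cipher
 * step cannot also do the MAC mechanism, the dual operation is
 * emulated with two chained single operations. The *_prov() variants
 * operate on a caller-supplied provider and session.
 */
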
38 #define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
39 #define	CRYPTO_CIPHER_MAC_OFFSET(f) offsetof(crypto_dual_cipher_mac_ops_t, f)
40 
41 static int crypto_mac_decrypt_common(crypto_mechanism_t *,
42     crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
43     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
44     crypto_data_t *, crypto_call_req_t *, boolean_t);
45 
46 static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
47     crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
48     crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
49     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
50     crypto_data_t *, crypto_call_req_t *, boolean_t);
51 
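/*
 * Performs a dual encrypt/mac atomic operation on the specified
 * provider and session.
 */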
52 int
53 crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
54     crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
55     crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
56     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
57     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
58 {
59 	/*
60 	 * First try to find a provider for the encryption mechanism that
61 	 * is also capable of the MAC mechanism.
62 	 */
63 	int rv;
64 	kcf_mech_entry_t *me;
65 	kcf_provider_desc_t *pd = provider;
66 	kcf_provider_desc_t *real_provider = pd;
67 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
68 	kcf_req_params_t params;
69 	kcf_encrypt_mac_ops_params_t *cmops;
70 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
71 
72 	ASSERT(KCF_PROV_REFHELD(pd));
73 
74 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
75 		rv = kcf_get_hardware_provider(encr_mech->cm_type,
76 		    mac_mech->cm_type, CHECK_RESTRICT(crq), pd,
77 		    &real_provider, CRYPTO_FG_ENCRYPT_MAC_ATOMIC);
78 
79 		if (rv != CRYPTO_SUCCESS)
80 			return (rv);
81 	}
82 
83 	/*
84 	 * For SW providers, check the validity of the context template.
85 	 * It is very rare that the generation number mismatches, so it
86 	 * is acceptable to fail here, and let the consumer recover by
87 	 * freeing this tmpl and creating a new one for the key and the
88 	 * new SW provider.
89 	 * Warning! This will need to change when multiple software
90 	 * providers per mechanism are supported.
91 	 */
92 
93 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
94 		if (encr_tmpl != NULL) {
95 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
96 			    KCF_SUCCESS) {
97 				rv = CRYPTO_MECHANISM_INVALID;
98 				goto out;
99 			}
100 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
101 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
102 				rv = CRYPTO_OLD_CTX_TEMPLATE;
103 				goto out;
104 			}
105 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
106 		}
107 
108 		if (mac_tmpl != NULL) {
109 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
110 			    KCF_SUCCESS) {
111 				rv = CRYPTO_MECHANISM_INVALID;
112 				goto out;
113 			}
114 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
115 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
116 				rv = CRYPTO_OLD_CTX_TEMPLATE;
117 				goto out;
118 			}
119 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
120 		}
121 	}
122 
123 	/* The fast path for SW providers. */
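	/*
	 * When the fastpath applies, the provider's SPI entry point is
	 * called directly (synchronously) below, instead of wrapping the
	 * arguments in a kcf_req_params_t and going through
	 * kcf_submit_request().
	 */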
124 	if (CHECK_FASTPATH(crq, real_provider)) {
125 		crypto_mechanism_t lencr_mech;
126 		crypto_mechanism_t lmac_mech;
127 
128 		/* careful! struct assignments */
129 		lencr_mech = *encr_mech;
130 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
131 		    &lencr_mech);
132 
133 		lmac_mech = *mac_mech;
134 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
135 		    &lmac_mech);
136 
137 		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
138 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
139 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
140 
141 		KCF_PROV_INCRSTATS(pd, rv);
142 	} else {
143 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
144 		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
145 		    spi_mac_tmpl);
146 
147 		cmops = &(params.rp_u.encrypt_mac_params);
148 
149 		/* careful! struct assignments */
150 		cmops->em_encr_mech = *encr_mech;
151 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
152 		    &cmops->em_encr_mech);
153 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
154 
155 		cmops->em_mac_mech = *mac_mech;
156 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
157 		    &cmops->em_mac_mech);
158 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
159 
160 		rv = kcf_submit_request(real_provider, NULL, crq, &params,
161 		    B_FALSE);
162 	}
163 
164 out:
165 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
166 		KCF_PROV_REFRELE(real_provider);
167 	return (rv);
168 }
169 
170 /*
171  * Performs a dual encrypt/mac atomic operation. The provider and session
172  * to use are determined by the KCF dispatcher.
173  */
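/*
 * Illustrative synchronous usage (a sketch only; it assumes the caller
 * has already initialized the mechanisms, keys, plaintext and the dual
 * ciphertext/MAC buffers):
 *
 *	rv = crypto_encrypt_mac(&encr_mech, &mac_mech, &pt, &encr_key,
 *	    &mac_key, NULL, NULL, &ct, &mac, NULL);
 *
 * A NULL crypto_call_req_t makes the request synchronous; passing a
 * request with a callback makes it asynchronous, in which case
 * CRYPTO_QUEUED may be returned and the result is delivered through
 * the callback.
 */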
174 int
175 crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
176     crypto_mechanism_t *mac_mech, crypto_data_t *pt,
177     crypto_key_t *encr_key, crypto_key_t *mac_key,
178     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
179     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
180 {
181 	/*
182 	 * First try to find a provider for the encryption mechanism that
183 	 * is also capable of the MAC mechanism.
184 	 */
185 	int error;
186 	kcf_mech_entry_t *me;
187 	kcf_provider_desc_t *pd;
188 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
189 	kcf_req_params_t params;
190 	kcf_encrypt_mac_ops_params_t *cmops;
191 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
192 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
193 	kcf_prov_tried_t *list = NULL;
194 	boolean_t encr_tmpl_checked = B_FALSE;
195 	boolean_t mac_tmpl_checked = B_FALSE;
196 	kcf_dual_req_t *next_req = NULL;
197 
198 retry:
199 	/* pd is returned held on success */
200 	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
201 	    &prov_mac_mechid, &error, list,
202 	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
203 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
204 	    CHECK_RESTRICT(crq), ct->dd_len1);
205 	if (pd == NULL) {
206 		if (list != NULL)
207 			kcf_free_triedlist(list);
208 		if (next_req != NULL)
209 			kmem_free(next_req, sizeof (kcf_dual_req_t));
210 		return (error);
211 	}
212 
213 	/*
214 	 * For SW providers, check the validity of the context template.
215 	 * It is very rare that the generation number mismatches, so it
216 	 * is acceptable to fail here, and let the consumer recover by
217 	 * freeing this tmpl and creating a new one for the key and the
218 	 * new SW provider.
219 	 * Warning! This will need to change when multiple software
220 	 * providers per mechanism are supported.
221 	 */
222 
223 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
224 		if (encr_tmpl != NULL) {
225 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
226 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
227 
228 				if (next_req != NULL)
229 					kmem_free(next_req,
230 					    sizeof (kcf_dual_req_t));
231 				if (list != NULL)
232 					kcf_free_triedlist(list);
233 
234 				KCF_PROV_REFRELE(pd);
235 				/* Which one is the old one? */
236 				return (CRYPTO_OLD_CTX_TEMPLATE);
237 			}
238 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
239 		}
240 		encr_tmpl_checked = B_TRUE;
241 	}
242 
243 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
244 		crypto_call_req_t encr_req;
245 
246 		/* Need to emulate with 2 internal calls */
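		/*
		 * The provider chosen above can do the encryption but not
		 * the MAC mechanism, so the encryption is submitted to it
		 * here and the MAC is done with a separate crypto_mac()
		 * call. For asynchronous requests, 'next_req' carries the
		 * MAC step and is chained as the callback of the
		 * encryption step (kcf_next_req()).
		 */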
247 		/* Allocate and initialize the MAC req for the callback */
248 
249 		if (crq != NULL) {
250 			if (next_req == NULL) {
251 				next_req = kcf_alloc_req(crq);
252 
253 				if (next_req == NULL) {
254 					KCF_PROV_REFRELE(pd);
255 					if (list != NULL)
256 						kcf_free_triedlist(list);
257 					return (CRYPTO_HOST_MEMORY);
258 				}
259 				/*
260 				 * Careful! We're wrapping in mac_tmpl instead
261 				 * of an spi_mac_tmpl. The callback routine will
262 				 * have to validate mac_tmpl, and use the
263 				 * mac_ctx_tmpl, once it picks a MAC provider.
264 				 */
265 				KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
266 				    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
267 				    (crypto_data_t *)ct, mac, mac_tmpl);
268 			}
269 
270 			encr_req.cr_flag = crq->cr_flag;
271 			encr_req.cr_callback_func = kcf_next_req;
272 			encr_req.cr_callback_arg = next_req;
273 		}
274 
275 		if (pt == NULL) {
276 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
277 			    pd->pd_sid, encr_mech, encr_key,
278 			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
279 		} else {
280 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
281 			    pd->pd_sid, encr_mech, encr_key, pt,
282 			    (crypto_data_t *)ct, spi_encr_tmpl);
283 		}
284 
285 		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
286 		    &encr_req, &params, B_TRUE);
287 
288 		switch (error) {
289 		case CRYPTO_SUCCESS: {
290 			off_t saveoffset;
291 			size_t savelen;
292 
293 			/*
294 			 * The encryption step is done. Reuse the encr_req
295 			 * for submitting the MAC step.
296 			 */
297 			if (next_req == NULL) {
298 				saveoffset = ct->dd_offset1;
299 				savelen = ct->dd_len1;
300 			} else {
301 				saveoffset = next_req->kr_saveoffset =
302 				    ct->dd_offset1;
303 				savelen = next_req->kr_savelen = ct->dd_len1;
304 				encr_req.cr_callback_func = kcf_last_req;
305 			}
306 
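			/*
			 * Substitute the second offset/length of the dual
			 * data for the first, so that the MAC step operates
			 * on the ciphertext produced above. The original
			 * values are restored below unless the MAC request
			 * was queued; the values saved in 'next_req' cover
			 * the queued case.
			 */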
307 			ct->dd_offset1 = ct->dd_offset2;
308 			ct->dd_len1 = ct->dd_len2;
309 
310 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
311 			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
312 			    &encr_req);
313 
314 			if (error != CRYPTO_QUEUED) {
315 				ct->dd_offset1 = saveoffset;
316 				ct->dd_len1 = savelen;
317 			}
318 			break;
319 		}
320 
321 		case CRYPTO_QUEUED:
322 			if ((crq != NULL) &&
323 			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
324 				crq->cr_reqid = encr_req.cr_reqid;
325 			break;
326 
327 		default:
328 
329 			/* Add pd to the linked list of providers tried. */
330 			if (IS_RECOVERABLE(error)) {
331 				if (kcf_insert_triedlist(&list, pd,
332 				    KCF_KMFLAG(crq)) != NULL)
333 					goto retry;
334 			}
335 		}
336 		if (error != CRYPTO_QUEUED && next_req != NULL)
337 			kmem_free(next_req, sizeof (kcf_dual_req_t));
338 		if (list != NULL)
339 			kcf_free_triedlist(list);
340 		KCF_PROV_REFRELE(pd);
341 		return (error);
342 	}
343 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
344 		if ((mac_tmpl != NULL) &&
345 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
346 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
347 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
348 
349 				if (next_req != NULL)
350 					kmem_free(next_req,
351 					    sizeof (kcf_dual_req_t));
352 				if (list != NULL)
353 					kcf_free_triedlist(list);
354 
355 				KCF_PROV_REFRELE(pd);
356 				/* Which one is the old one? */
357 				return (CRYPTO_OLD_CTX_TEMPLATE);
358 			}
359 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
360 		}
361 		mac_tmpl_checked = B_TRUE;
362 	}
363 
364 	/* The fast path for SW providers. */
365 	if (CHECK_FASTPATH(crq, pd)) {
366 		crypto_mechanism_t lencr_mech;
367 		crypto_mechanism_t lmac_mech;
368 
369 		/* careful! struct assignments */
370 		lencr_mech = *encr_mech;
371 		lencr_mech.cm_type = prov_encr_mechid;
372 		lmac_mech = *mac_mech;
373 		lmac_mech.cm_type = prov_mac_mechid;
374 
375 		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
376 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
377 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
378 
379 		KCF_PROV_INCRSTATS(pd, error);
380 	} else {
381 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
382 		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
383 		    spi_mac_tmpl);
384 
385 		cmops = &(params.rp_u.encrypt_mac_params);
386 
387 		/* careful! struct assignments */
388 		cmops->em_encr_mech = *encr_mech;
389 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
390 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
391 		cmops->em_mac_mech = *mac_mech;
392 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
393 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
394 
395 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
396 	}
397 
398 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
399 	    IS_RECOVERABLE(error)) {
400 		/* Add pd to the linked list of providers tried. */
401 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
402 			goto retry;
403 	}
404 
405 	if (next_req != NULL)
406 		kmem_free(next_req, sizeof (kcf_dual_req_t));
407 
408 	if (list != NULL)
409 		kcf_free_triedlist(list);
410 
411 	KCF_PROV_REFRELE(pd);
412 	return (error);
413 }
414 
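/*
 * Starts a multi-part dual encrypt/mac operation on the specified
 * provider and session.
 */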
415 int
416 crypto_encrypt_mac_init_prov(crypto_provider_t provider,
417     crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
418     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
419     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
420     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
421     crypto_call_req_t *cr)
422 {
423 	/*
424 	 * First try to find a provider for the encryption mechanism that
425 	 * is also capable of the MAC mechanism.
426 	 */
427 	int rv;
428 	kcf_mech_entry_t *me;
429 	kcf_provider_desc_t *pd = provider;
430 	kcf_provider_desc_t *real_provider = pd;
431 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
432 	kcf_req_params_t params;
433 	kcf_encrypt_mac_ops_params_t *cmops;
434 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
435 	crypto_ctx_t *ctx;
436 	kcf_context_t *encr_kcf_context = NULL;
437 
438 	ASSERT(KCF_PROV_REFHELD(pd));
439 
440 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
441 		rv = kcf_get_hardware_provider(encr_mech->cm_type,
442 		    mac_mech->cm_type, CHECK_RESTRICT(cr), pd, &real_provider,
443 		    CRYPTO_FG_ENCRYPT_MAC);
444 
445 		if (rv != CRYPTO_SUCCESS)
446 			return (rv);
447 	}
448 
449 	/*
450 	 * For SW providers, check the validity of the context template.
451 	 * It is very rare that the generation number mismatches, so it
452 	 * is acceptable to fail here, and let the consumer recover by
453 	 * freeing this tmpl and creating a new one for the key and the
454 	 * new SW provider.
455 	 * Warning! This will need to change when multiple software
456 	 * providers per mechanism are supported.
457 	 */
458 
459 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
460 		if (encr_tmpl != NULL) {
461 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
462 			    KCF_SUCCESS) {
463 				rv = CRYPTO_MECHANISM_INVALID;
464 				goto out;
465 			}
466 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
467 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
468 				rv = CRYPTO_OLD_CTX_TEMPLATE;
469 				goto out;
470 			}
471 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
472 		}
473 
474 		if (mac_tmpl != NULL) {
475 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
476 			    KCF_SUCCESS) {
477 				rv = CRYPTO_MECHANISM_INVALID;
478 				goto out;
479 			}
480 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
481 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
482 				rv = CRYPTO_OLD_CTX_TEMPLATE;
483 				goto out;
484 			}
485 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
486 		}
487 	}
488 
489 	ctx = kcf_new_ctx(cr, real_provider, sid);
490 	if (ctx == NULL) {
491 		rv = CRYPTO_HOST_MEMORY;
492 		goto out;
493 	}
494 	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
495 
496 	/* The fast path for SW providers. */
497 	if (CHECK_FASTPATH(cr, real_provider)) {
498 		crypto_mechanism_t lencr_mech;
499 		crypto_mechanism_t lmac_mech;
500 
501 		/* careful! struct assignments */
502 		lencr_mech = *encr_mech;
503 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
504 		    &lencr_mech);
505 
506 		lmac_mech = *mac_mech;
507 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
508 		    &lmac_mech);
509 
510 		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
511 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
512 		    KCF_SWFP_RHNDL(cr));
513 
514 		KCF_PROV_INCRSTATS(pd, rv);
515 	} else {
516 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
517 		    sid, encr_key, mac_key, NULL, NULL, NULL,
518 		    spi_encr_tmpl, spi_mac_tmpl);
519 
520 		cmops = &(params.rp_u.encrypt_mac_params);
521 
522 		/* careful! struct assignments */
523 		cmops->em_encr_mech = *encr_mech;
524 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
525 		    &cmops->em_encr_mech);
526 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
527 
528 		cmops->em_mac_mech = *mac_mech;
529 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
530 		    &cmops->em_mac_mech);
531 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
532 
533 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
534 		    B_FALSE);
535 	}
536 
537 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
538 		KCF_CONTEXT_REFRELE(encr_kcf_context);
539 	} else
540 		*ctxp = (crypto_context_t)ctx;
541 
542 out:
543 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
544 		KCF_PROV_REFRELE(real_provider);
545 	return (rv);
546 }
547 
548 /*
549  * Starts a multi-part dual encrypt/mac operation. The provider and session
550  * to use are determined by the KCF dispatcher.
551  */
552 /* ARGSUSED */
553 int
554 crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
555     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
556     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
557     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
558     crypto_call_req_t *cr)
559 {
560 	/*
561 	 * First try to find a provider for the encryption mechanism that
562 	 * is also capable of the MAC mechanism.
563 	 */
564 	int error;
565 	kcf_mech_entry_t *me;
566 	kcf_provider_desc_t *pd;
567 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
568 	kcf_req_params_t params;
569 	kcf_encrypt_mac_ops_params_t *cmops;
570 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
571 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
572 	kcf_prov_tried_t *list = NULL;
573 	boolean_t encr_tmpl_checked = B_FALSE;
574 	boolean_t mac_tmpl_checked = B_FALSE;
575 	crypto_ctx_t *ctx = NULL;
576 	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
577 	crypto_call_flag_t save_flag;
578 
579 retry:
580 	/* pd is returned held on success */
581 	pd = kcf_get_dual_provider(encr_mech, mac_mech, &me, &prov_encr_mechid,
582 	    &prov_mac_mechid, &error, list,
583 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC,
584 	    CHECK_RESTRICT(cr), 0);
585 	if (pd == NULL) {
586 		if (list != NULL)
587 			kcf_free_triedlist(list);
588 		return (error);
589 	}
590 
591 	/*
592 	 * For SW providers, check the validity of the context template.
593 	 * It is very rare that the generation number mismatches, so it
594 	 * is acceptable to fail here, and let the consumer recover by
595 	 * freeing this tmpl and creating a new one for the key and the
596 	 * new SW provider.
597 	 * Warning! This will need to change when multiple software
598 	 * providers per mechanism are supported.
599 	 */
600 
601 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
602 		if (encr_tmpl != NULL) {
603 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
604 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
605 
606 				if (list != NULL)
607 					kcf_free_triedlist(list);
608 				if (encr_kcf_context != NULL)
609 					KCF_CONTEXT_REFRELE(encr_kcf_context);
610 
611 				KCF_PROV_REFRELE(pd);
612 				/* Which one is the old one? */
613 				return (CRYPTO_OLD_CTX_TEMPLATE);
614 			}
615 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
616 		}
617 		encr_tmpl_checked = B_TRUE;
618 	}
619 
620 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
621 		/* Need to emulate with 2 internal calls */
622 
623 		/*
624 		 * We avoid code complexity by limiting the pure asynchronous
625 		 * case to SW providers only.
626 		 * XXX - Redo the emulation code below so that we can
627 		 * remove this limitation.
628 		 */
629 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
630 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
631 			    != NULL))
632 				goto retry;
633 			if (list != NULL)
634 				kcf_free_triedlist(list);
635 			if (encr_kcf_context != NULL)
636 				KCF_CONTEXT_REFRELE(encr_kcf_context);
637 			KCF_PROV_REFRELE(pd);
638 			return (CRYPTO_HOST_MEMORY);
639 		}
640 
641 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
642 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
643 			if (ctx == NULL) {
644 				if (list != NULL)
645 					kcf_free_triedlist(list);
646 				if (encr_kcf_context != NULL)
647 					KCF_CONTEXT_REFRELE(encr_kcf_context);
648 				KCF_PROV_REFRELE(pd);
649 				return (CRYPTO_HOST_MEMORY);
650 			}
651 			encr_kcf_context = (kcf_context_t *)
652 			    ctx->cc_framework_private;
653 		}
654 		/*
655 		 * Trade-off speed vs avoidance of code complexity and
656 		 * duplication:
657 		 * Could do all the combinations of fastpath / synch / asynch
658 		 * for the encryption and the mac steps. Early attempts
659 		 * showed the code grew wild and bug-prone, for little gain.
660 		 * Therefore, the adaptive asynch case is not implemented.
661 		 * It's either pure synchronous, or pure asynchronous.
662 		 * We still preserve a fastpath for the pure synchronous
663 		 * requests to SW providers.
664 		 */
665 		if (cr == NULL) {
666 			crypto_context_t mac_context;
667 
668 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
669 				crypto_mechanism_t lmech = *encr_mech;
670 
671 				lmech.cm_type = prov_encr_mechid;
672 
673 				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
674 				    encr_key, spi_encr_tmpl,
675 				    KCF_RHNDL(KM_SLEEP));
676 			} else {
677 				/*
678 				 * If we did the 'goto retry' then ctx may not
679 				 * be NULL.  In general, we can't reuse another
680 				 * provider's context, so we free it now so
681 				 * we don't leak it.
682 				 */
683 				if (ctx != NULL) {
684 					KCF_CONTEXT_REFRELE((kcf_context_t *)
685 					    ctx->cc_framework_private);
686 					encr_kcf_context = NULL;
687 				}
688 				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
689 				    encr_mech, encr_key, &encr_tmpl,
690 				    (crypto_context_t *)&ctx, NULL);
691 
692 				if (error == CRYPTO_SUCCESS) {
693 					encr_kcf_context = (kcf_context_t *)
694 					    ctx->cc_framework_private;
695 				}
696 			}
697 			KCF_PROV_INCRSTATS(pd, error);
698 
699 			KCF_PROV_REFRELE(pd);
700 
701 			if (error != CRYPTO_SUCCESS) {
702 				/* Can't be CRYPTO_QUEUED. return the failure */
703 				if (list != NULL)
704 					kcf_free_triedlist(list);
705 				if (encr_kcf_context != NULL)
706 					KCF_CONTEXT_REFRELE(encr_kcf_context);
707 
708 				return (error);
709 			}
710 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
711 			    &mac_context, NULL);
712 
713 			if (list != NULL)
714 				kcf_free_triedlist(list);
715 
716 			if (error != CRYPTO_SUCCESS) {
717 				/* Should this be an ASSERT() ? */
718 
719 				KCF_CONTEXT_REFRELE(encr_kcf_context);
720 			} else {
721 				encr_kcf_context = (kcf_context_t *)
722 				    ctx->cc_framework_private;
723 				mac_kcf_context = (kcf_context_t *)
724 				    ((crypto_ctx_t *)mac_context)->
725 				    cc_framework_private;
726 
727 				encr_kcf_context->kc_secondctx =
728 				    mac_kcf_context;
729 				KCF_CONTEXT_REFHOLD(mac_kcf_context);
730 
731 				*ctxp = (crypto_context_t)ctx;
732 			}
733 
734 			return (error);
735 		}
736 
737 		/* submit a pure asynchronous request. */
738 		save_flag = cr->cr_flag;
739 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
740 
741 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
742 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
743 		    spi_encr_tmpl, spi_mac_tmpl);
744 
745 		cmops = &(params.rp_u.encrypt_mac_params);
746 
747 		/* careful! struct assignments */
748 		cmops->em_encr_mech = *encr_mech;
749 		/*
750 		 * cmops->em_encr_mech.cm_type will be set when we get to
751 		 * the kcf_emulate_dual() routine.
752 		 */
753 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
754 		cmops->em_mac_mech = *mac_mech;
755 
756 		/*
757 		 * cmops->em_mac_mech.cm_type will be set when we know the
758 		 * MAC provider.
759 		 */
760 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
761 
762 		/*
763 		 * non-NULL ctx->kc_secondctx tells common_submit_request
764 		 * that this request uses separate cipher and MAC contexts.
765 		 * That function will set ctx->kc_secondctx to the new
766 		 * MAC context, once it gets one.
767 		 */
768 		encr_kcf_context->kc_secondctx = encr_kcf_context;
769 
770 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
771 
772 		cr->cr_flag = save_flag;
773 
774 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
775 			KCF_CONTEXT_REFRELE(encr_kcf_context);
776 		}
777 		if (list != NULL)
778 			kcf_free_triedlist(list);
779 		*ctxp = (crypto_context_t)ctx;
780 		KCF_PROV_REFRELE(pd);
781 		return (error);
782 	}
783 
784 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
785 		if ((mac_tmpl != NULL) &&
786 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
787 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
788 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
789 
790 				if (list != NULL)
791 					kcf_free_triedlist(list);
792 
793 				KCF_PROV_REFRELE(pd);
794 				/* Which one is the old one? */
795 				return (CRYPTO_OLD_CTX_TEMPLATE);
796 			}
797 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
798 		}
799 		mac_tmpl_checked = B_TRUE;
800 	}
801 
802 	if (ctx == NULL) {
803 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
804 		if (ctx == NULL) {
805 			if (list != NULL)
806 				kcf_free_triedlist(list);
807 
808 			KCF_PROV_REFRELE(pd);
809 			return (CRYPTO_HOST_MEMORY);
810 		}
811 		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
812 	}
813 
814 	/* The fast path for SW providers. */
815 	if (CHECK_FASTPATH(cr, pd)) {
816 		crypto_mechanism_t lencr_mech;
817 		crypto_mechanism_t lmac_mech;
818 
819 		/* careful! struct assignments */
820 		lencr_mech = *encr_mech;
821 		lencr_mech.cm_type = prov_encr_mechid;
822 		lmac_mech = *mac_mech;
823 		lmac_mech.cm_type = prov_mac_mechid;
824 
825 		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
826 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
827 		    KCF_SWFP_RHNDL(cr));
828 
829 		KCF_PROV_INCRSTATS(pd, error);
830 	} else {
831 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
832 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
833 		    spi_encr_tmpl, spi_mac_tmpl);
834 
835 		cmops = &(params.rp_u.encrypt_mac_params);
836 
837 		/* careful! struct assignments */
838 		cmops->em_encr_mech = *encr_mech;
839 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
840 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
841 		cmops->em_mac_mech = *mac_mech;
842 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
843 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
844 
845 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
846 	}
847 
848 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
849 		if ((IS_RECOVERABLE(error)) &&
850 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
851 			goto retry;
852 
853 		KCF_CONTEXT_REFRELE(encr_kcf_context);
854 	} else
855 		*ctxp = (crypto_context_t)ctx;
856 
857 	if (list != NULL)
858 		kcf_free_triedlist(list);
859 
860 	KCF_PROV_REFRELE(pd);
861 	return (error);
862 }
863 
864 /*
865  * Continues a multi-part dual encrypt/mac operation.
866  */
867 /* ARGSUSED */
868 int
869 crypto_encrypt_mac_update(crypto_context_t context,
870     crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
871 {
872 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
873 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
874 	kcf_provider_desc_t *pd;
875 	int error;
876 	kcf_req_params_t params;
877 
878 	if ((ctx == NULL) ||
879 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
880 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
881 		return (CRYPTO_INVALID_CONTEXT);
882 	}
883 
884 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
885 	KCF_PROV_REFHOLD(pd);
886 
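	/*
	 * A non-NULL kc_secondctx means the init step was emulated with
	 * two single-operation contexts (cipher and MAC), so the update is
	 * split into an encrypt update followed by a MAC update over the
	 * ciphertext produced in this pass.
	 */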
887 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
888 		off_t save_offset;
889 		size_t save_len;
890 		crypto_call_flag_t save_flag;
891 
892 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
893 			error = CRYPTO_INVALID_CONTEXT;
894 			goto out;
895 		}
896 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
897 
898 		/* First we submit the encryption request */
899 		if (cr == NULL) {
900 			/*
901 			 * 'ct' is never NULL.
902 			 * A NULL 'pt' means in-place.
903 			 */
904 			if (pt == NULL)
905 				error = crypto_encrypt_update(context,
906 				    (crypto_data_t *)ct, NULL, NULL);
907 			else
908 				error = crypto_encrypt_update(context, pt,
909 				    (crypto_data_t *)ct, NULL);
910 
911 			if (error != CRYPTO_SUCCESS)
912 				goto out;
913 
914 			/*
915 			 * Call mac_update when there is data to throw in
916 			 * the mix. Either an explicitly non-zero ct->dd_len2,
917 			 * or the last ciphertext portion.
918 			 */
919 			save_offset = ct->dd_offset1;
920 			save_len = ct->dd_len1;
921 			if (ct->dd_len2 == 0) {
922 				/*
923 				 * The previous encrypt step was an
924 				 * accumulation only and didn't produce any
925 				 * partial output
926 				 */
927 				if (ct->dd_len1 == 0)
928 					goto out;
929 			} else {
930 				ct->dd_offset1 = ct->dd_offset2;
931 				ct->dd_len1 = ct->dd_len2;
932 			}
933 			error = crypto_mac_update((crypto_context_t)mac_ctx,
934 			    (crypto_data_t *)ct, NULL);
935 
936 			ct->dd_offset1 = save_offset;
937 			ct->dd_len1 = save_len;
938 
939 			goto out;
940 		}
941 		/* submit a pure asynchronous request. */
942 		save_flag = cr->cr_flag;
943 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
944 
945 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
946 		    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL)
947 
948 
949 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
950 
951 		cr->cr_flag = save_flag;
952 		goto out;
953 	}
954 
955 	/* The fast path for SW providers. */
956 	if (CHECK_FASTPATH(cr, pd)) {
957 		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
958 		KCF_PROV_INCRSTATS(pd, error);
959 	} else {
960 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
961 		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);
962 
963 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
964 	}
965 out:
966 	KCF_PROV_REFRELE(pd);
967 	return (error);
968 }
969 
970 /*
971  * Terminates a multi-part dual encrypt/mac operation.
972  */
973 /* ARGSUSED */
974 int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
975     crypto_data_t *mac, crypto_call_req_t *cr)
976 {
977 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
978 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
979 	kcf_provider_desc_t *pd;
980 	int error;
981 	kcf_req_params_t params;
982 
983 	if ((ctx == NULL) ||
984 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
985 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
986 		return (CRYPTO_INVALID_CONTEXT);
987 	}
988 
989 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
990 	KCF_PROV_REFHOLD(pd);
991 
992 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
993 		off_t save_offset;
994 		size_t save_len;
995 		crypto_context_t mac_context;
996 		crypto_call_flag_t save_flag;
997 
998 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
999 			KCF_PROV_REFRELE(pd);
1000 			return (CRYPTO_INVALID_CONTEXT);
1001 		}
1002 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
1003 		mac_context = (crypto_context_t)mac_ctx;
1004 
1005 		if (cr == NULL) {
1006 			/* Get the last chunk of ciphertext */
1007 			error = crypto_encrypt_final(context,
1008 			    (crypto_data_t *)ct, NULL);
1009 
1010 			KCF_PROV_REFRELE(pd);
1011 
1012 			if (error != CRYPTO_SUCCESS)  {
1013 				/*
1014 				 * Needed here, because the caller of
1015 				 * crypto_encrypt_mac_final() lost all
1016 				 * refs to the mac_ctx.
1017 				 */
1018 				crypto_cancel_ctx(mac_context);
1019 				return (error);
1020 			}
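			/*
			 * If the final encrypt call produced additional
			 * ciphertext (described by the second offset/length
			 * of the dual data), fold it into the MAC before
			 * collecting the final MAC value.
			 */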
1021 			if (ct->dd_len2 > 0) {
1022 				save_offset = ct->dd_offset1;
1023 				save_len = ct->dd_len1;
1024 				ct->dd_offset1 = ct->dd_offset2;
1025 				ct->dd_len1 = ct->dd_len2;
1026 
1027 				error = crypto_mac_update(mac_context,
1028 				    (crypto_data_t *)ct, NULL);
1029 
1030 				ct->dd_offset1 = save_offset;
1031 				ct->dd_len1 = save_len;
1032 
1033 				if (error != CRYPTO_SUCCESS)  {
1034 					crypto_cancel_ctx(mac_context);
1035 					return (error);
1036 				}
1037 			}
1038 
1039 			/* and finally, collect the MAC */
1040 			error = crypto_mac_final(mac_context, mac, NULL);
1041 
1042 			return (error);
1043 		}
1044 		/* submit a pure asynchronous request. */
1045 		save_flag = cr->cr_flag;
1046 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1047 
1048 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1049 		    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL)
1050 
1051 
1052 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1053 
1054 		cr->cr_flag = save_flag;
1055 		KCF_PROV_REFRELE(pd);
1056 		return (error);
1057 	}
1058 	/* The fast path for SW providers. */
1059 	if (CHECK_FASTPATH(cr, pd)) {
1060 		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
1061 		KCF_PROV_INCRSTATS(pd, error);
1062 	} else {
1063 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1064 		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);
1065 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1066 	}
1067 out:
1068 	KCF_PROV_REFRELE(pd);
1069 	/* Release the hold done in kcf_new_ctx() during init step. */
1070 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
1071 	return (error);
1072 }
1073 
1074 /*
1075  * Performs an atomic dual mac/decrypt operation. The provider to use
1076  * is determined by the KCF dispatcher.
1077  */
1078 int
1079 crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
1080     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1081     crypto_key_t *mac_key, crypto_key_t *decr_key,
1082     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1083     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1084 {
1085 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1086 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
1087 }
1088 
1089 int
1090 crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
1091     crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
1092     crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
1093     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1094     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1095 {
1096 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1097 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1098 	    crq, B_FALSE));
1099 }
1100 
1101 /*
1102  * Performs an atomic dual mac/decrypt operation. The provider to use
1103  * is determined by the KCF dispatcher. 'mac' specifies the expected
1104  * value for the MAC. The decryption is not performed if the computed
1105  * MAC does not match the expected MAC.
1106  */
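/*
 * Illustrative synchronous usage (a sketch only; it assumes the caller
 * has already set up the mechanisms, keys, dual data and the expected
 * MAC):
 *
 *	rv = crypto_mac_verify_decrypt(&mac_mech, &decr_mech, &ct,
 *	    &mac_key, &decr_key, NULL, NULL, &mac, &pt, NULL);
 */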
1107 int
1108 crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
1109     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1110     crypto_key_t *mac_key, crypto_key_t *decr_key,
1111     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1112     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1113 {
1114 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1115 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
1116 }
1117 
1118 int
1119 crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
1120     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1121     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1122     crypto_key_t *mac_key, crypto_key_t *decr_key,
1123     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1124     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1125 {
1126 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1127 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1128 	    crq, B_TRUE));
1129 }
1130 
1131 /*
1132  * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
1133  * The MAC is optionally verified before the decryption step is performed.
1134  */
1135 static int
1136 crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
1137     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1138     crypto_key_t *mac_key, crypto_key_t *decr_key,
1139     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1140     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1141     boolean_t do_verify)
1142 {
1143 	/*
1144 	 * First try to find a provider for the decryption mechanism that
1145 	 * is also capable of the MAC mechanism.
1146 	 * We still favor optimizing the costlier decryption.
1147 	 */
1148 	int error;
1149 	kcf_mech_entry_t *me;
1150 	kcf_provider_desc_t *pd;
1151 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1152 	kcf_req_params_t params;
1153 	kcf_mac_decrypt_ops_params_t *cmops;
1154 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1155 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1156 	kcf_prov_tried_t *list = NULL;
1157 	boolean_t decr_tmpl_checked = B_FALSE;
1158 	boolean_t mac_tmpl_checked = B_FALSE;
1159 	kcf_dual_req_t *next_req = NULL;
1160 	crypto_call_req_t mac_req, *mac_reqp = NULL;
1161 
1162 retry:
1163 	/* pd is returned held on success */
1164 	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
1165 	    &prov_mac_mechid, &error, list,
1166 	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1167 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1168 	    CHECK_RESTRICT(crq), ct->dd_len2);
1169 	if (pd == NULL) {
1170 		if (list != NULL)
1171 			kcf_free_triedlist(list);
1172 		if (next_req != NULL)
1173 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1174 		return (CRYPTO_MECH_NOT_SUPPORTED);
1175 	}
1176 
1177 	/*
1178 	 * For SW providers, check the validity of the context template.
1179 	 * It is very rare that the generation number mismatches, so it
1180 	 * is acceptable to fail here, and let the consumer recover by
1181 	 * freeing this tmpl and creating a new one for the key and the
1182 	 * new SW provider.
1183 	 */
1184 
1185 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1186 		if (decr_tmpl != NULL) {
1187 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1188 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1189 				if (next_req != NULL)
1190 					kmem_free(next_req,
1191 					    sizeof (kcf_dual_req_t));
1192 				if (list != NULL)
1193 					kcf_free_triedlist(list);
1194 				KCF_PROV_REFRELE(pd);
1195 
1196 				/* Which one is the old one? */
1197 				return (CRYPTO_OLD_CTX_TEMPLATE);
1198 			}
1199 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1200 		}
1201 		decr_tmpl_checked = B_TRUE;
1202 	}
1203 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1204 		/* Need to emulate with 2 internal calls */
1205 
1206 		/* Prepare the call_req to be submitted for the MAC step */
1207 
1208 		if (crq != NULL) {
1209 
1210 			if (next_req == NULL) {
1211 				/*
1212 				 * allocate, initialize and prepare the
1213 				 * params for the next step only in the
1214 				 * first pass (not on every retry).
1215 				 */
1216 				next_req = kcf_alloc_req(crq);
1217 
1218 				if (next_req == NULL) {
1219 					KCF_PROV_REFRELE(pd);
1220 					if (list != NULL)
1221 						kcf_free_triedlist(list);
1222 					return (CRYPTO_HOST_MEMORY);
1223 				}
1224 				KCF_WRAP_DECRYPT_OPS_PARAMS(
1225 				    &(next_req->kr_params), KCF_OP_ATOMIC,
1226 				    NULL, decr_mech, decr_key,
1227 				    (crypto_data_t *)ct, pt, spi_decr_tmpl);
1228 			}
1229 
1230 			mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
1231 			mac_req.cr_flag |= CRYPTO_SETDUAL;
1232 			mac_req.cr_callback_func = kcf_next_req;
1233 			mac_req.cr_callback_arg = next_req;
1234 			mac_reqp = &mac_req;
1235 		}
1236 
1237 		/* 'pd' is the decryption provider. */
1238 
1239 		if (do_verify)
1240 			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
1241 			    mac_key, mac_tmpl, mac,
1242 			    (crq == NULL) ? NULL : mac_reqp);
1243 		else
1244 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
1245 			    mac_key, mac_tmpl, mac,
1246 			    (crq == NULL) ? NULL : mac_reqp);
1247 
1248 		switch (error) {
1249 		case CRYPTO_SUCCESS: {
1250 			off_t saveoffset;
1251 			size_t savelen;
1252 
1253 			if (next_req == NULL) {
1254 				saveoffset = ct->dd_offset1;
1255 				savelen = ct->dd_len1;
1256 			} else {
1257 				saveoffset = next_req->kr_saveoffset =
1258 				    ct->dd_offset1;
1259 				savelen = next_req->kr_savelen = ct->dd_len1;
1260 
1261 				ASSERT(mac_reqp != NULL);
1262 				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
1263 				mac_req.cr_callback_func = kcf_last_req;
1264 			}
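			/*
			 * The MAC step succeeded; substitute the second
			 * offset/length of the dual data for the first so
			 * the decryption operates on that region. The
			 * original values are restored below unless the
			 * request was queued.
			 */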
1265 			ct->dd_offset1 = ct->dd_offset2;
1266 			ct->dd_len1 = ct->dd_len2;
1267 
1268 			if (CHECK_FASTPATH(crq, pd)) {
1269 				crypto_mechanism_t lmech;
1270 
1271 				lmech = *decr_mech;
1272 				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
1273 				    pd, &lmech);
1274 
1275 				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
1276 				    &lmech, decr_key, (crypto_data_t *)ct,
1277 				    (crypto_data_t *)pt, spi_decr_tmpl,
1278 				    KCF_SWFP_RHNDL(mac_reqp));
1279 
1280 				KCF_PROV_INCRSTATS(pd, error);
1281 			} else {
1282 				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
1283 				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
1284 				    decr_key, (crypto_data_t *)ct, pt,
1285 				    spi_decr_tmpl);
1286 
1287 				error = kcf_submit_request(pd, NULL,
1288 				    (crq == NULL) ? NULL : mac_reqp,
1289 				    &params, B_FALSE);
1290 			}
1291 			if (error != CRYPTO_QUEUED) {
1292 				KCF_PROV_INCRSTATS(pd, error);
1293 				ct->dd_offset1 = saveoffset;
1294 				ct->dd_len1 = savelen;
1295 			}
1296 			break;
1297 		}
1298 
1299 		case CRYPTO_QUEUED:
1300 			if ((crq != NULL) && !(crq->cr_flag & CRYPTO_SKIP_REQID))
1301 				crq->cr_reqid = mac_req.cr_reqid;
1302 			break;
1303 
1304 		default:
1305 			if (IS_RECOVERABLE(error)) {
1306 				if (kcf_insert_triedlist(&list, pd,
1307 				    KCF_KMFLAG(crq)) != NULL)
1308 					goto retry;
1309 			}
1310 		}
1311 		if (error != CRYPTO_QUEUED && next_req != NULL)
1312 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1313 		if (list != NULL)
1314 			kcf_free_triedlist(list);
1315 		KCF_PROV_REFRELE(pd);
1316 		return (error);
1317 	}
1318 
1319 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1320 		if ((mac_tmpl != NULL) &&
1321 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1322 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1323 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1324 				if (next_req != NULL)
1325 					kmem_free(next_req,
1326 					    sizeof (kcf_dual_req_t));
1327 				if (list != NULL)
1328 					kcf_free_triedlist(list);
1329 				KCF_PROV_REFRELE(pd);
1330 
1331 				/* Which one is the old one? */
1332 				return (CRYPTO_OLD_CTX_TEMPLATE);
1333 			}
1334 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1335 		}
1336 		mac_tmpl_checked = B_TRUE;
1337 	}
1338 
1339 	/* The fast path for SW providers. */
1340 	if (CHECK_FASTPATH(crq, pd)) {
1341 		crypto_mechanism_t lmac_mech;
1342 		crypto_mechanism_t ldecr_mech;
1343 
1344 		/* careful! struct assignments */
1345 		ldecr_mech = *decr_mech;
1346 		ldecr_mech.cm_type = prov_decr_mechid;
1347 		lmac_mech = *mac_mech;
1348 		lmac_mech.cm_type = prov_mac_mechid;
1349 
1350 		if (do_verify)
1351 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
1352 			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
1353 			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1354 			    KCF_SWFP_RHNDL(crq));
1355 		else
1356 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
1357 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1358 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1359 			    KCF_SWFP_RHNDL(crq));
1360 
1361 		KCF_PROV_INCRSTATS(pd, error);
1362 	} else {
1363 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1364 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1365 		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
1366 		    spi_mac_tmpl, spi_decr_tmpl);
1367 
1368 		cmops = &(params.rp_u.mac_decrypt_params);
1369 
1370 		/* careful! struct assignments */
1371 		cmops->md_decr_mech = *decr_mech;
1372 		cmops->md_decr_mech.cm_type = prov_decr_mechid;
1373 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1374 		cmops->md_mac_mech = *mac_mech;
1375 		cmops->md_mac_mech.cm_type = prov_mac_mechid;
1376 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1377 
1378 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
1379 	}
1380 
1381 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
1382 	    IS_RECOVERABLE(error)) {
1383 		/* Add pd to the linked list of providers tried. */
1384 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
1385 			goto retry;
1386 	}
1387 
1388 	if (list != NULL)
1389 		kcf_free_triedlist(list);
1390 
1391 	if (next_req != NULL)
1392 		kmem_free(next_req, sizeof (kcf_dual_req_t));
1393 	KCF_PROV_REFRELE(pd);
1394 	return (error);
1395 }
1396 
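/*
 * Single-provider variant of crypto_mac_decrypt_common(): performs the
 * atomic dual mac/decrypt (optionally verifying the MAC) on the given
 * provider and session.
 */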
1397 static int
1398 crypto_mac_decrypt_common_prov(crypto_provider_t provider,
1399     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1400     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1401     crypto_key_t *mac_key, crypto_key_t *decr_key,
1402     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1403     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1404     boolean_t do_verify)
1405 {
1406 	/*
1407 	 * First try to find a provider for the decryption mechanism that
1408 	 * is also capable of the MAC mechanism.
1409 	 * We still favor optimizing the costlier decryption.
1410 	 */
1411 	int error;
1412 	kcf_mech_entry_t *me;
1413 	kcf_provider_desc_t *pd = provider;
1414 	kcf_provider_desc_t *real_provider = pd;
1415 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1416 	kcf_req_params_t params;
1417 	kcf_mac_decrypt_ops_params_t *cmops;
1418 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1419 
1420 	ASSERT(KCF_PROV_REFHELD(pd));
1421 
1422 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1423 		error = kcf_get_hardware_provider(decr_mech->cm_type,
1424 		    mac_mech->cm_type, CHECK_RESTRICT(crq), pd,
1425 		    &real_provider, CRYPTO_FG_MAC_DECRYPT_ATOMIC);
1426 
1427 		if (error != CRYPTO_SUCCESS)
1428 			return (error);
1429 	}
1430 
1431 	/*
1432 	 * For SW providers, check the validity of the context template.
1433 	 * It is very rare that the generation number mismatches, so it
1434 	 * is acceptable to fail here, and let the consumer recover by
1435 	 * freeing this tmpl and creating a new one for the key and the
1436 	 * new SW provider.
1437 	 */
1438 
1439 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1440 		if (decr_tmpl != NULL) {
1441 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1442 			    KCF_SUCCESS) {
1443 				error = CRYPTO_MECHANISM_INVALID;
1444 				goto out;
1445 			}
1446 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1447 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1448 				error = CRYPTO_OLD_CTX_TEMPLATE;
1449 				goto out;
1450 			}
1451 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1452 		}
1453 
1454 		if (mac_tmpl != NULL) {
1455 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1456 			    KCF_SUCCESS) {
1457 				error = CRYPTO_MECHANISM_INVALID;
1458 				goto out;
1459 			}
1460 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1461 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1462 				error = CRYPTO_OLD_CTX_TEMPLATE;
1463 				goto out;
1464 			}
1465 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1466 		}
1467 	}
1468 
1469 	/* The fast path for SW providers. */
1470 	if (CHECK_FASTPATH(crq, pd)) {
1471 		crypto_mechanism_t lmac_mech;
1472 		crypto_mechanism_t ldecr_mech;
1473 
1474 		/* careful! struct assignments */
1475 		ldecr_mech = *decr_mech;
1476 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1477 		    &ldecr_mech);
1478 
1479 		lmac_mech = *mac_mech;
1480 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1481 		    &lmac_mech);
1482 
1483 		if (do_verify)
1484 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
1485 			    real_provider, sid, &lmac_mech, mac_key,
1486 			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
1487 			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
1488 		else
1489 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
1490 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1491 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1492 			    KCF_SWFP_RHNDL(crq));
1493 
1494 		KCF_PROV_INCRSTATS(pd, error);
1495 	} else {
1496 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1497 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1498 		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
1499 		    spi_mac_tmpl, spi_decr_tmpl);
1500 
1501 		cmops = &(params.rp_u.mac_decrypt_params);
1502 
1503 		/* careful! struct assignments */
1504 		cmops->md_decr_mech = *decr_mech;
1505 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1506 		    &cmops->md_decr_mech);
1507 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1508 
1509 		cmops->md_mac_mech = *mac_mech;
1510 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1511 		    &cmops->md_mac_mech);
1512 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1513 
1514 		error = kcf_submit_request(real_provider, NULL, crq, &params,
1515 		    B_FALSE);
1516 	}
1517 
1518 out:
1519 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1520 		KCF_PROV_REFRELE(real_provider);
1521 	return (error);
1522 }
1523 
1524 /*
1525  * Starts a multi-part dual mac/decrypt operation. The provider to
1526  * use is determined by the KCF dispatcher.
1527  */
1528 /* ARGSUSED */
1529 int
1530 crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
1531     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1532     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1533     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1534     crypto_call_req_t *cr)
1535 {
1536 	/*
1537 	 * First try to find a provider for the decryption mechanism that
1538 	 * is also capable of the MAC mechanism.
1539 	 * We still favor optimizing the costlier decryption.
1540 	 */
1541 	int error;
1542 	kcf_mech_entry_t *me;
1543 	kcf_provider_desc_t *pd;
1544 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1545 	kcf_req_params_t params;
1546 	kcf_mac_decrypt_ops_params_t *mdops;
1547 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1548 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1549 	kcf_prov_tried_t *list = NULL;
1550 	boolean_t decr_tmpl_checked = B_FALSE;
1551 	boolean_t mac_tmpl_checked = B_FALSE;
1552 	crypto_ctx_t *ctx = NULL;
1553 	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
1554 	crypto_call_flag_t save_flag;
1555 
1556 retry:
1557 	/* pd is returned held on success */
1558 	pd = kcf_get_dual_provider(decr_mech, mac_mech, &me, &prov_decr_mechid,
1559 	    &prov_mac_mechid, &error, list,
1560 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC,
1561 	    CHECK_RESTRICT(cr), 0);
1562 	if (pd == NULL) {
1563 		if (list != NULL)
1564 			kcf_free_triedlist(list);
1565 		return (error);
1566 	}
1567 
1568 	/*
1569 	 * For SW providers, check the validity of the context template.
1570 	 * It is very rare that the generation number mismatches, so it
1571 	 * is acceptable to fail here, and let the consumer recover by
1572 	 * freeing this tmpl and creating a new one for the key and the
1573 	 * new SW provider.
1574 	 * Warning! This will need to change when multiple software
1575 	 * providers per mechanism are supported.
1576 	 */
1577 
1578 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1579 		if (decr_tmpl != NULL) {
1580 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1581 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1582 
1583 				if (list != NULL)
1584 					kcf_free_triedlist(list);
1585 				if (decr_kcf_context != NULL)
1586 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1587 
1588 				KCF_PROV_REFRELE(pd);
1589 				/* Which one is the old one? */
1590 				return (CRYPTO_OLD_CTX_TEMPLATE);
1591 			}
1592 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1593 		}
1594 		decr_tmpl_checked = B_TRUE;
1595 	}
1596 
1597 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1598 		/* Need to emulate with 2 internal calls */
1599 
1600 		/*
1601 		 * We avoid code complexity by limiting the pure asynchronous
1602 		 * case to SW providers only.
1603 		 * XXX - Redo the emulation code below so that we can
1604 		 * remove this limitation.
1605 		 */
1606 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
1607 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
1608 			    != NULL))
1609 				goto retry;
1610 			if (list != NULL)
1611 				kcf_free_triedlist(list);
1612 			if (decr_kcf_context != NULL)
1613 				KCF_CONTEXT_REFRELE(decr_kcf_context);
1614 			KCF_PROV_REFRELE(pd);
1615 			return (CRYPTO_HOST_MEMORY);
1616 		}
1617 
1618 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1619 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1620 			if (ctx == NULL) {
1621 				if (list != NULL)
1622 					kcf_free_triedlist(list);
1623 				if (decr_kcf_context != NULL)
1624 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1625 				KCF_PROV_REFRELE(pd);
1626 				return (CRYPTO_HOST_MEMORY);
1627 			}
1628 			decr_kcf_context = (kcf_context_t *)
1629 			    ctx->cc_framework_private;
1630 		}
1631 		/*
1632 		 * Trade-off speed vs avoidance of code complexity and
1633 		 * duplication:
1634 		 * Could do all the combinations of fastpath / synch / asynch
1635 		 * for the decryption and the mac steps. Early attempts
1636 		 * showed the code grew wild and bug-prone, for little gain.
1637 		 * Therefore, the adaptive asynch case is not implemented.
1638 		 * It's either pure synchronous, or pure asynchronous.
1639 		 * We still preserve a fastpath for the pure synchronous
1640 		 * requests to SW providers.
1641 		 */
1642 		if (cr == NULL) {
1643 			crypto_context_t mac_context;
1644 
1645 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
1646 			    &mac_context, NULL);
1647 
1648 			if (error != CRYPTO_SUCCESS) {
1649 				/* Can't be CRYPTO_QUEUED. return the failure */
1650 				if (list != NULL)
1651 					kcf_free_triedlist(list);
1652 
1653 				if (decr_kcf_context != NULL)
1654 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1655 				return (error);
1656 			}
1657 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1658 				crypto_mechanism_t lmech = *decr_mech;
1659 
1660 				lmech.cm_type = prov_decr_mechid;
1661 
1662 				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
1663 				    decr_key, spi_decr_tmpl,
1664 				    KCF_RHNDL(KM_SLEEP));
1665 			} else {
1666 				/*
1667 				 * If we did the 'goto retry' then ctx may not
1668 				 * be NULL.  In general, we can't reuse another
1669 				 * provider's context, so we free it now so
1670 				 * we don't leak it.
1671 				 */
1672 				if (ctx != NULL) {
1673 					KCF_CONTEXT_REFRELE((kcf_context_t *)
1674 					    ctx->cc_framework_private);
1675 					decr_kcf_context = NULL;
1676 				}
1677 				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
1678 				    decr_mech, decr_key, &decr_tmpl,
1679 				    (crypto_context_t *)&ctx, NULL);
1680 
1681 				if (error == CRYPTO_SUCCESS) {
1682 					decr_kcf_context = (kcf_context_t *)
1683 					    ctx->cc_framework_private;
1684 				}
1685 			}
1686 
1687 			KCF_PROV_INCRSTATS(pd, error);
1688 
1689 			KCF_PROV_REFRELE(pd);
1690 
1691 			if (error != CRYPTO_SUCCESS) {
1692 				/* Can't be CRYPTO_QUEUED. return the failure */
1693 				if (list != NULL)
1694 					kcf_free_triedlist(list);
1695 				if (mac_kcf_context != NULL)
1696 					KCF_CONTEXT_REFRELE(mac_kcf_context);
1697 
1698 				return (error);
1699 			}
1700 			mac_kcf_context = (kcf_context_t *)
1701 			    ((crypto_ctx_t *)mac_context)->
1702 			    cc_framework_private;
1703 
1704 			decr_kcf_context = (kcf_context_t *)
1705 			    ctx->cc_framework_private;
1706 
1707 			/*
1708 			 * Here also, the mac context is second. The callback
1709 			 * case can't overwrite the context returned to
1710 			 * the caller.
1711 			 */
1712 			decr_kcf_context->kc_secondctx = mac_kcf_context;
1713 			KCF_CONTEXT_REFHOLD(mac_kcf_context);
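			/*
			 * Resulting linkage (descriptive note): the context
			 * returned in *ctxp below is the decrypt context,
			 * and its kc_secondctx points at the emulated MAC
			 * context; this is how crypto_mac_decrypt_update()
			 * and crypto_mac_decrypt_final() find it later.
			 */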
1714 
1715 			*ctxp = (crypto_context_t)ctx;
1716 
1717 			return (error);
1718 		}
1719 		/* submit a pure asynchronous request. */
1720 		save_flag = cr->cr_flag;
1721 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1722 
1723 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1724 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1725 		    spi_mac_tmpl, spi_decr_tmpl);
1726 
1727 		mdops = &(params.rp_u.mac_decrypt_params);
1728 
1729 		/* careful! struct assignments */
1730 		mdops->md_decr_mech = *decr_mech;
1731 		/*
1732 		 * mdops->md_decr_mech.cm_type will be set when we get to
1733 		 * the kcf_emulate_dual() routine.
1734 		 */
1735 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1736 		mdops->md_mac_mech = *mac_mech;
1737 
1738 		/*
1739 		 * mdops->md_mac_mech.cm_type will be set when we know the
1740 		 * MAC provider.
1741 		 */
1742 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1743 
1744 		/*
1745 		 * A non-NULL kc_secondctx tells common_submit_request()
1746 		 * that this request uses separate cipher and MAC contexts.
1747 		 * That function will set the MAC context's kc_secondctx to
1748 		 * this decrypt context.
1749 		 */
1750 		decr_kcf_context->kc_secondctx = decr_kcf_context;
1751 
1752 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1753 
1754 		cr->cr_flag = save_flag;
1755 
1756 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1757 			KCF_CONTEXT_REFRELE(decr_kcf_context);
1758 		}
1759 		if (list != NULL)
1760 			kcf_free_triedlist(list);
1761 		*ctxp = (crypto_context_t)ctx;
1762 		KCF_PROV_REFRELE(pd);
1763 		return (error);
1764 	}
1765 
1766 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1767 		if ((mac_tmpl != NULL) &&
1768 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1769 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1770 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1771 
1772 				if (list != NULL)
1773 					kcf_free_triedlist(list);
1774 
1775 				KCF_PROV_REFRELE(pd);
1776 				/* Which one is the the old one ? */
1777 				/* Which template is the old one? */
1778 			}
1779 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1780 		}
1781 		mac_tmpl_checked = B_TRUE;
1782 	}
1783 
1784 	if (ctx == NULL) {
1785 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1786 		if (ctx == NULL) {
1787 			error = CRYPTO_HOST_MEMORY;
1788 			if (list != NULL)
1789 				kcf_free_triedlist(list);
			/* Also release the provider hold */
			KCF_PROV_REFRELE(pd);
1790 			return (CRYPTO_HOST_MEMORY);
1791 		}
1792 		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1793 	}
1794 
1795 	/* The fast path for SW providers. */
1796 	if (CHECK_FASTPATH(cr, pd)) {
1797 		crypto_mechanism_t ldecr_mech;
1798 		crypto_mechanism_t lmac_mech;
1799 
1800 		/* careful! struct assignments */
1801 		ldecr_mech = *decr_mech;
1802 		ldecr_mech.cm_type = prov_decr_mechid;
1803 		lmac_mech = *mac_mech;
1804 		lmac_mech.cm_type = prov_mac_mechid;
1805 
1806 		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
1807 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1808 		    KCF_SWFP_RHNDL(cr));
1809 
1810 		KCF_PROV_INCRSTATS(pd, error);
1811 	} else {
1812 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1813 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1814 		    spi_mac_tmpl, spi_decr_tmpl);
1815 
1816 		mdops = &(params.rp_u.mac_decrypt_params);
1817 
1818 		/* careful! struct assignments */
1819 		mdops->md_decr_mech = *decr_mech;
1820 		mdops->md_decr_mech.cm_type = prov_decr_mechid;
1821 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1822 		mdops->md_mac_mech = *mac_mech;
1823 		mdops->md_mac_mech.cm_type = prov_mac_mechid;
1824 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1825 
1826 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1827 	}
1828 
1829 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1830 		if ((IS_RECOVERABLE(error)) &&
1831 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
1832 			goto retry;
1833 
1834 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1835 	} else
1836 		*ctxp = (crypto_context_t)ctx;
1837 
1838 	if (list != NULL)
1839 		kcf_free_triedlist(list);
1840 
1841 	KCF_PROV_REFRELE(pd);
1842 	return (error);
1843 }
1844 
1845 int
1846 crypto_mac_decrypt_init_prov(crypto_provider_t provider,
1847     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1848     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1849     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1850     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1851     crypto_call_req_t *cr)
1852 {
1853 	/*
1854 	 * First try to find a provider for the decryption mechanism that
1855 	 * is also capable of the MAC mechanism.
1856 	 * We still favor optimizing the costlier decryption.
1857 	 */
1858 	int rv;
1859 	kcf_mech_entry_t *me;
1860 	kcf_provider_desc_t *pd = provider;
1861 	kcf_provider_desc_t *real_provider = pd;
1862 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1863 	kcf_req_params_t params;
1864 	kcf_mac_decrypt_ops_params_t *mdops;
1865 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1866 	crypto_ctx_t *ctx;
1867 	kcf_context_t *decr_kcf_context = NULL;
1868 
1869 	ASSERT(KCF_PROV_REFHELD(pd));
1870 
1871 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1872 		rv = kcf_get_hardware_provider(decr_mech->cm_type,
1873 		    mac_mech->cm_type, CHECK_RESTRICT(cr), pd, &real_provider,
1874 		    CRYPTO_FG_MAC_DECRYPT);
1875 
1876 		if (rv != CRYPTO_SUCCESS)
1877 			return (rv);
1878 	}
1879 
1880 	/*
1881 	 * For SW providers, check the validity of the context template.
1882 	 * A generation number mismatch is very rare, so it is acceptable
1883 	 * to fail here and let the consumer recover by freeing this
1884 	 * template and creating a new one for the key and the new SW
1885 	 * provider.
1886 	 * Warning! This will need to change when multiple software
1887 	 * providers per mechanism are supported.
1888 	 */
1889 
1890 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1891 		if (decr_tmpl != NULL) {
1892 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1893 			    KCF_SUCCESS) {
1894 				rv = CRYPTO_MECHANISM_INVALID;
1895 				goto out;
1896 			}
1897 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1898 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1899 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1900 				goto out;
1901 			}
1902 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1903 		}
1904 
1905 		if (mac_tmpl != NULL) {
1906 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1907 			    KCF_SUCCESS) {
1908 				rv = CRYPTO_MECHANISM_INVALID;
1909 				goto out;
1910 			}
1911 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1912 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1913 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1914 				goto out;
1915 			}
1916 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1917 		}
1918 	}
1919 
1920 	ctx = kcf_new_ctx(cr, real_provider, sid);
1921 	if (ctx == NULL) {
1922 		rv = CRYPTO_HOST_MEMORY;
1923 		goto out;
1924 	}
1925 	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1926 
1927 	/* The fast path for SW providers. */
1928 	if (CHECK_FASTPATH(cr, pd)) {
1929 		crypto_mechanism_t ldecr_mech;
1930 		crypto_mechanism_t lmac_mech;
1931 
1932 		/* careful! struct assignments */
1933 		ldecr_mech = *decr_mech;
1934 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1935 		    &ldecr_mech);
1936 
1937 		lmac_mech = *mac_mech;
1938 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1939 		    &lmac_mech);
1940 
1941 		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
1942 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1943 		    KCF_SWFP_RHNDL(cr));
1944 
1945 		KCF_PROV_INCRSTATS(pd, rv);
1946 	} else {
1947 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1948 		    sid, mac_key, decr_key, NULL, NULL, NULL,
1949 		    spi_mac_tmpl, spi_decr_tmpl);
1950 
1951 		mdops = &(params.rp_u.mac_decrypt_params);
1952 
1953 		/* careful! struct assignments */
1954 		mdops->md_decr_mech = *decr_mech;
1955 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1956 		    &mdops->md_decr_mech);
1957 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1958 
1959 		mdops->md_mac_mech = *mac_mech;
1960 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1961 		    &mdops->md_mac_mech);
1962 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1963 
1964 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
1965 		    B_FALSE);
1966 	}
1967 
1968 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
1969 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1970 	} else
1971 		*ctxp = (crypto_context_t)ctx;
1972 
1973 out:
1974 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1975 		KCF_PROV_REFRELE(real_provider);
1976 	return (rv);
1977 }
1978 /*
1979  * Continues a multi-part dual mac/decrypt operation.
1980  */
1981 /* ARGSUSED */
1982 int
1983 crypto_mac_decrypt_update(crypto_context_t context,
1984     crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
1985 {
1986 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
1987 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
1988 	kcf_provider_desc_t *pd;
1989 	int error;
1990 	kcf_req_params_t params;
1991 
1992 	if ((ctx == NULL) ||
1993 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
1994 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
1995 		return (CRYPTO_INVALID_CONTEXT);
1996 	}
1997 
1998 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
1999 	KCF_PROV_REFHOLD(pd);
2000 
2001 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
2002 		off_t save_offset;
2003 		size_t save_len;
2004 		crypto_call_flag_t save_flag;
2005 
2006 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2007 			error = CRYPTO_INVALID_CONTEXT;
2008 			goto out;
2009 		}
2010 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2011 
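		/*
		 * Descriptive note: a non-NULL kc_secondctx means the init
		 * step emulated the dual operation with separate MAC and
		 * decrypt contexts, so the two single operations are driven
		 * individually below.
		 */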
2012 		/* First we submit the MAC request */
2013 		if (cr == NULL) {
2014 			/*
2015 			 * 'ct' is never NULL here.
2016 			 */
2017 			error = crypto_mac_update((crypto_context_t)mac_ctx,
2018 			    (crypto_data_t *)ct, NULL);
2019 
2020 			if (error != CRYPTO_SUCCESS)
2021 				goto out;
2022 
2023 			/* Decrypt a different length only when told so */
2024 			/* Decrypt a different region only if dd_len2 is set */
2025 			save_offset = ct->dd_offset1;
2026 			save_len = ct->dd_len1;
2027 
2028 			if (ct->dd_len2 > 0) {
2029 				ct->dd_offset1 = ct->dd_offset2;
2030 				ct->dd_len1 = ct->dd_len2;
2031 			}
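			/*
			 * Illustration only (made-up values): with
			 * dd_offset1 = 0, dd_len1 = 64 and dd_offset2 = 16,
			 * dd_len2 = 48, the MAC above covered the 64 bytes
			 * at offset 0, while the decryption below applies to
			 * the 48 bytes at offset 16.
			 */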
2032 
2033 			error = crypto_decrypt_update(context,
2034 			    (crypto_data_t *)ct, pt, NULL);
2035 
2036 			ct->dd_offset1 = save_offset;
2037 			ct->dd_len1 = save_len;
2038 
2039 			goto out;
2040 		}
2041 		/* submit a pure asynchronous request. */
2042 		save_flag = cr->cr_flag;
2043 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2044 
2045 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2046 		    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL);
2047 
2049 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2050 
2051 		cr->cr_flag = save_flag;
2052 		goto out;
2053 	}
2054 
2055 	/* The fast path for SW providers. */
2056 	if (CHECK_FASTPATH(cr, pd)) {
2057 		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
2058 		KCF_PROV_INCRSTATS(pd, error);
2059 	} else {
2060 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2061 		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);
2062 
2063 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2064 	}
2065 out:
2066 	KCF_PROV_REFRELE(pd);
2067 	return (error);
2068 }
2069 
2070 /*
2071  * Terminates a multi-part dual mac/decrypt operation.
2072  */
2073 /* ARGSUSED */
2074 int
2075 crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
2076     crypto_data_t *pt, crypto_call_req_t *cr)
2077 {
2078 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
2079 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
2080 	kcf_provider_desc_t *pd;
2081 	int error;
2082 	kcf_req_params_t params;
2083 
2084 	if ((ctx == NULL) ||
2085 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
2086 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
2087 		return (CRYPTO_INVALID_CONTEXT);
2088 	}
2089 
2090 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
2091 	KCF_PROV_REFHOLD(pd);
2092 
2093 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
2094 		crypto_call_flag_t save_flag;
2095 
2096 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2097 			error = CRYPTO_INVALID_CONTEXT;
2098 			goto out;
2099 		}
2100 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2101 
2102 		/* First we collect the MAC */
2103 		if (cr == NULL) {
2104 
2105 			error = crypto_mac_final((crypto_context_t)mac_ctx,
2106 			    mac, NULL);
2107 
2108 			if (error != CRYPTO_SUCCESS) {
2109 				crypto_cancel_ctx(ctx);
2110 			} else {
2111 				/* Get the last chunk of plaintext */
2112 				error = crypto_decrypt_final(context, pt, NULL);
2113 			}
2114 
2115 			KCF_PROV_REFRELE(pd);
2116 			return (error);
2117 		}
2118 		/* submit a pure asynchronous request. */
2119 		save_flag = cr->cr_flag;
2120 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2121 
2122 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2123 		    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL);
2124 
2126 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2127 
2128 		cr->cr_flag = save_flag;
2129 
2130 		KCF_PROV_REFRELE(pd);
2131 		return (error);
2132 	}
2133 
2134 	/* The fast path for SW providers. */
2135 	if (CHECK_FASTPATH(cr, pd)) {
2136 		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
2137 		KCF_PROV_INCRSTATS(pd, error);
2138 	} else {
2139 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2140 		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);
2141 
2142 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2143 	}
2144 out:
2145 	KCF_PROV_REFRELE(pd);
2146 	/* Release the hold done in kcf_new_ctx() during init step. */
2147 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
2148 	return (error);
2149 }
2150 
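/*
 * Illustrative sketch only (not part of the original file): the typical
 * consumer flow for a multi-part dual MAC/decrypt operation against a
 * chosen provider. Variable names are made up, next_ciphertext_chunk()
 * is a placeholder, and error handling is omitted for brevity.
 *
 *	crypto_context_t ctx;
 *	int rv;
 *
 *	rv = crypto_mac_decrypt_init_prov(prov, sid, &mac_mech, &decr_mech,
 *	    mac_key, decr_key, mac_tmpl, decr_tmpl, &ctx, NULL);
 *
 *	while (next_ciphertext_chunk(&ct))
 *		rv = crypto_mac_decrypt_update(ctx, &ct, &pt, NULL);
 *
 *	rv = crypto_mac_decrypt_final(ctx, &mac, &pt, NULL);
 *
 * On success, 'mac' receives the computed MAC and 'pt' the final chunk of
 * plaintext; the framework drops its context hold in the final step (see
 * KCF_CONTEXT_COND_RELEASE above).
 */
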
2151 /*
2152  * Digest/Encrypt dual operation. Project-private entry point, not part of
2153  * the k-API.
2154  */
2155 /* ARGSUSED */
2156 int
2157 crypto_digest_encrypt_update(crypto_context_t digest_ctx,
2158     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2159     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2160 {
2161 	/*
2162 	 * RFE 4688647:
2163 	 * core functions needed by ioctl interface missing from impl.h
2164 	 */
2165 	return (CRYPTO_NOT_SUPPORTED);
2166 }
2167 
2168 /*
2169  * Decrypt/Digest dual operation. Project-private entry point, not part of
2170  * the k-API.
2171  */
2172 /* ARGSUSED */
2173 int
2174 crypto_decrypt_digest_update(crypto_context_t decrypt_ctx,
2175     crypto_context_t digest_ctx, crypto_data_t *ciphertext,
2176     crypto_data_t *plaintext, crypto_call_req_t *crq)
2177 {
2178 	/*
2179 	 * RFE 4688647:
2180 	 * core functions needed by ioctl interface missing from impl.h
2181 	 */
2182 	return (CRYPTO_NOT_SUPPORTED);
2183 }
2184 
2185 /*
2186  * Sign/Encrypt dual operation. Project-private entry point, not part of
2187  * the k-API.
2188  */
2189 /* ARGSUSED */
2190 int
2191 crypto_sign_encrypt_update(crypto_context_t sign_ctx,
2192     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2193     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2194 {
2195 	/*
2196 	 * RFE 4688647:
2197 	 * core functions needed by ioctl interface missing from impl.h
2198 	 */
2199 	return (CRYPTO_NOT_SUPPORTED);
2200 }
2201 
2202 /*
2203  * Decrypt/Verify dual operation. Project-private entry point, not part of
2204  * the k-API.
2205  */
2206 /* ARGSUSED */
2207 int
2208 crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
2209     crypto_context_t verify_ctx, crypto_data_t *ciphertext,
2210     crypto_data_t *plaintext, crypto_call_req_t *crq)
2211 {
2212 	/*
2213 	 * RFE 4688647:
2214 	 * core functions needed by ioctl interface missing from impl.h
2215 	 */
2216 	return (CRYPTO_NOT_SUPPORTED);
2217 }
2218