xref: /titanic_41/usr/src/uts/common/crypto/api/kcf_dual.c (revision d24234c24aeaca4ca56ee3ac2794507968f274c4)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 #include <sys/errno.h>
27 #include <sys/types.h>
28 #include <sys/kmem.h>
29 #include <sys/sysmacros.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/api.h>
33 #include <sys/crypto/spi.h>
34 #include <sys/crypto/sched_impl.h>
35 
36 #define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
37 #define	CRYPTO_CIPHER_MAC_OFFSET(f) offsetof(crypto_dual_cipher_mac_ops_t, f)
38 
39 static int crypto_mac_decrypt_common(crypto_mechanism_t *,
40     crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
41     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
42     crypto_data_t *, crypto_call_req_t *, boolean_t);
43 
44 static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
45     crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
46     crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
47     crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
48     crypto_data_t *, crypto_call_req_t *, boolean_t);
49 
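/*
 * Performs a dual encrypt/mac atomic operation with the provider and
 * session specified by the caller.
 */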
50 int
51 crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
52     crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
53     crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
54     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
55     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
56 {
57 	/*
58 	 * First try to find a provider for the encryption mechanism that
59 	 * is also capable of the MAC mechanism.
60 	 */
61 	int rv;
62 	kcf_mech_entry_t *me;
63 	kcf_provider_desc_t *pd = provider;
64 	kcf_provider_desc_t *real_provider = pd;
65 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
66 	kcf_req_params_t params;
67 	kcf_encrypt_mac_ops_params_t *cmops;
68 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
69 
70 	ASSERT(KCF_PROV_REFHELD(pd));
71 
72 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
73 		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
74 		    mac_mech->cm_type, mac_key, CHECK_RESTRICT(crq), pd,
75 		    &real_provider, CRYPTO_FG_ENCRYPT_MAC_ATOMIC);
76 
77 		if (rv != CRYPTO_SUCCESS)
78 			return (rv);
79 	}
80 
81 	/*
82 	 * For SW providers, check the validity of the context template.
83 	 * It is very rare that the generation number mismatches, so it
84 	 * is acceptable to fail here and let the consumer recover by
85 	 * freeing this tmpl and creating a new one for the key and the
86 	 * new SW provider.
87 	 * Warning! This will need to change when multiple software
88 	 * providers per mechanism are supported.
89 	 */
90 
91 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
92 		if (encr_tmpl != NULL) {
93 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
94 			    KCF_SUCCESS) {
95 				rv = CRYPTO_MECHANISM_INVALID;
96 				goto out;
97 			}
98 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
99 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
100 				rv = CRYPTO_OLD_CTX_TEMPLATE;
101 				goto out;
102 			}
103 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
104 		}
105 
106 		if (mac_tmpl != NULL) {
107 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
108 			    KCF_SUCCESS) {
109 				rv = CRYPTO_MECHANISM_INVALID;
110 				goto out;
111 			}
112 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
113 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
114 				rv = CRYPTO_OLD_CTX_TEMPLATE;
115 				goto out;
116 			}
117 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
118 		}
119 	}
120 
121 	/* The fast path for SW providers. */
122 	if (CHECK_FASTPATH(crq, real_provider)) {
123 		crypto_mechanism_t lencr_mech;
124 		crypto_mechanism_t lmac_mech;
125 
126 		/* careful! struct assignments */
127 		lencr_mech = *encr_mech;
128 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
129 		    &lencr_mech);
130 
131 		lmac_mech = *mac_mech;
132 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
133 		    &lmac_mech);
134 
135 		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
136 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
137 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
138 
139 		KCF_PROV_INCRSTATS(pd, rv);
140 	} else {
141 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
142 		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
143 		    spi_mac_tmpl);
144 
145 		cmops = &(params.rp_u.encrypt_mac_params);
146 
147 		/* careful! struct assignments */
148 		cmops->em_encr_mech = *encr_mech;
149 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
150 		    &cmops->em_encr_mech);
151 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
152 
153 		cmops->em_mac_mech = *mac_mech;
154 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
155 		    &cmops->em_mac_mech);
156 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
157 
158 		rv = kcf_submit_request(real_provider, NULL, crq, &params,
159 		    B_FALSE);
160 	}
161 
162 out:
163 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
164 		KCF_PROV_REFRELE(real_provider);
165 	return (rv);
166 }
167 
168 /*
169  * Performs a dual encrypt/mac atomic operation. The provider and session
170  * to use are determined by the KCF dispatcher.
171  */
172 int
173 crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
174     crypto_mechanism_t *mac_mech, crypto_data_t *pt,
175     crypto_key_t *encr_key, crypto_key_t *mac_key,
176     crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
177     crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
178 {
179 	/*
180 	 * First try to find a provider for the encryption mechanism that
181 	 * is also capable of the MAC mechanism.
182 	 */
183 	int error;
184 	kcf_mech_entry_t *me;
185 	kcf_provider_desc_t *pd;
186 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
187 	kcf_req_params_t params;
188 	kcf_encrypt_mac_ops_params_t *cmops;
189 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
190 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
191 	kcf_prov_tried_t *list = NULL;
192 	boolean_t encr_tmpl_checked = B_FALSE;
193 	boolean_t mac_tmpl_checked = B_FALSE;
194 	kcf_dual_req_t *next_req = NULL;
195 
196 retry:
197 	/* pd is returned held on success */
198 	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
199 	    &me, &prov_encr_mechid,
200 	    &prov_mac_mechid, &error, list,
201 	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
202 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
203 	    CHECK_RESTRICT(crq), ct->dd_len1);
204 	if (pd == NULL) {
205 		if (list != NULL)
206 			kcf_free_triedlist(list);
207 		if (next_req != NULL)
208 			kmem_free(next_req, sizeof (kcf_dual_req_t));
209 		return (error);
210 	}
211 
212 	/*
213 	 * For SW providers, check the validity of the context template.
214 	 * It is very rare that the generation number mismatches, so it
215 	 * is acceptable to fail here and let the consumer recover by
216 	 * freeing this tmpl and creating a new one for the key and the
217 	 * new SW provider.
218 	 * Warning! This will need to change when multiple software
219 	 * providers per mechanism are supported.
220 	 */
221 
222 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
223 		if (encr_tmpl != NULL) {
224 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
225 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
226 
227 				if (next_req != NULL)
228 					kmem_free(next_req,
229 					    sizeof (kcf_dual_req_t));
230 				if (list != NULL)
231 					kcf_free_triedlist(list);
232 
233 				KCF_PROV_REFRELE(pd);
234 				/* Which one is the old one? */
235 				return (CRYPTO_OLD_CTX_TEMPLATE);
236 			}
237 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
238 		}
239 		encr_tmpl_checked = B_TRUE;
240 	}
241 
242 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
243 		crypto_call_req_t encr_req;
244 
245 		/* Need to emulate with 2 internal calls */
246 		/* Allocate and initialize the MAC req for the callback */
247 
248 		if (crq != NULL) {
249 			if (next_req == NULL) {
250 				next_req = kcf_alloc_req(crq);
251 
252 				if (next_req == NULL) {
253 					KCF_PROV_REFRELE(pd);
254 					if (list != NULL)
255 						kcf_free_triedlist(list);
256 					return (CRYPTO_HOST_MEMORY);
257 				}
258 				/*
259 				 * Careful! We're wrapping in mac_tmpl instead
260 				 * of an spi_mac_tmpl. The callback routine will
261 				 * have to validate mac_tmpl, and use the
262 				 * mac_ctx_tmpl, once it picks a MAC provider.
263 				 */
264 				KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
265 				    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
266 				    (crypto_data_t *)ct, mac, mac_tmpl);
267 			}
268 
269 			encr_req.cr_flag = crq->cr_flag;
270 			encr_req.cr_callback_func = kcf_next_req;
271 			encr_req.cr_callback_arg = next_req;
272 		}
273 
274 		if (pt == NULL) {
275 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
276 			    pd->pd_sid, encr_mech, encr_key,
277 			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
278 		} else {
279 			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
280 			    pd->pd_sid, encr_mech, encr_key, pt,
281 			    (crypto_data_t *)ct, spi_encr_tmpl);
282 		}
283 
284 		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
285 		    &encr_req, &params, B_TRUE);
286 
287 		switch (error) {
288 		case CRYPTO_SUCCESS: {
289 			off_t saveoffset;
290 			size_t savelen;
291 
292 			/*
293 			 * The encryption step is done. Reuse the encr_req
294 			 * for submitting the MAC step.
295 			 */
296 			if (next_req == NULL) {
297 				saveoffset = ct->dd_offset1;
298 				savelen = ct->dd_len1;
299 			} else {
300 				saveoffset = next_req->kr_saveoffset =
301 				    ct->dd_offset1;
302 				savelen = next_req->kr_savelen = ct->dd_len1;
303 				encr_req.cr_callback_func = kcf_last_req;
304 			}
305 
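			/*
			 * Make the primary region (dd_offset1/dd_len1)
			 * describe the ciphertext held in the secondary
			 * region, so the MAC step below operates on it.
			 * The saved offset and length are restored below
			 * unless the request remains queued.
			 */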
306 			ct->dd_offset1 = ct->dd_offset2;
307 			ct->dd_len1 = ct->dd_len2;
308 
309 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
310 			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
311 			    &encr_req);
312 
313 			if (error != CRYPTO_QUEUED) {
314 				ct->dd_offset1 = saveoffset;
315 				ct->dd_len1 = savelen;
316 			}
317 			break;
318 		}
319 
320 		case CRYPTO_QUEUED:
321 			if ((crq != NULL) &&
322 			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
323 				crq->cr_reqid = encr_req.cr_reqid;
324 			break;
325 
326 		default:
327 
328 			/* Add pd to the linked list of providers tried. */
329 			if (IS_RECOVERABLE(error)) {
330 				if (kcf_insert_triedlist(&list, pd,
331 				    KCF_KMFLAG(crq)) != NULL)
332 					goto retry;
333 			}
334 		}
335 		if (error != CRYPTO_QUEUED && next_req != NULL)
336 			kmem_free(next_req, sizeof (kcf_dual_req_t));
337 		if (list != NULL)
338 			kcf_free_triedlist(list);
339 		KCF_PROV_REFRELE(pd);
340 		return (error);
341 	}
342 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
343 		if ((mac_tmpl != NULL) &&
344 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
345 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
346 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
347 
348 				if (next_req != NULL)
349 					kmem_free(next_req,
350 					    sizeof (kcf_dual_req_t));
351 				if (list != NULL)
352 					kcf_free_triedlist(list);
353 
354 				KCF_PROV_REFRELE(pd);
355 				/* Which one is the old one? */
356 				return (CRYPTO_OLD_CTX_TEMPLATE);
357 			}
358 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
359 		}
360 		mac_tmpl_checked = B_TRUE;
361 	}
362 
363 	/* The fast path for SW providers. */
364 	if (CHECK_FASTPATH(crq, pd)) {
365 		crypto_mechanism_t lencr_mech;
366 		crypto_mechanism_t lmac_mech;
367 
368 		/* careful! struct assignments */
369 		lencr_mech = *encr_mech;
370 		lencr_mech.cm_type = prov_encr_mechid;
371 		lmac_mech = *mac_mech;
372 		lmac_mech.cm_type = prov_mac_mechid;
373 
374 		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
375 		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
376 		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));
377 
378 		KCF_PROV_INCRSTATS(pd, error);
379 	} else {
380 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
381 		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
382 		    spi_mac_tmpl);
383 
384 		cmops = &(params.rp_u.encrypt_mac_params);
385 
386 		/* careful! struct assignments */
387 		cmops->em_encr_mech = *encr_mech;
388 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
389 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
390 		cmops->em_mac_mech = *mac_mech;
391 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
392 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
393 
394 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
395 	}
396 
397 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
398 	    IS_RECOVERABLE(error)) {
399 		/* Add pd to the linked list of providers tried. */
400 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
401 			goto retry;
402 	}
403 
404 	if (next_req != NULL)
405 		kmem_free(next_req, sizeof (kcf_dual_req_t));
406 
407 	if (list != NULL)
408 		kcf_free_triedlist(list);
409 
410 	KCF_PROV_REFRELE(pd);
411 	return (error);
412 }
413 
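/*
 * Starts a multi-part dual encrypt/mac operation with the provider and
 * session specified by the caller.
 */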
414 int
415 crypto_encrypt_mac_init_prov(crypto_provider_t provider,
416     crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
417     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
418     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
419     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
420     crypto_call_req_t *cr)
421 {
422 	/*
423 	 * First try to find a provider for the encryption mechanism that
424 	 * is also capable of the MAC mechanism.
425 	 */
426 	int rv;
427 	kcf_mech_entry_t *me;
428 	kcf_provider_desc_t *pd = provider;
429 	kcf_provider_desc_t *real_provider = pd;
430 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
431 	kcf_req_params_t params;
432 	kcf_encrypt_mac_ops_params_t *cmops;
433 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
434 	crypto_ctx_t *ctx;
435 	kcf_context_t *encr_kcf_context = NULL;
436 
437 	ASSERT(KCF_PROV_REFHELD(pd));
438 
439 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
440 		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
441 		    mac_mech->cm_type, mac_key, CHECK_RESTRICT(cr), pd,
442 		    &real_provider, CRYPTO_FG_ENCRYPT_MAC);
443 
444 		if (rv != CRYPTO_SUCCESS)
445 			return (rv);
446 	}
447 
448 	/*
449 	 * For SW providers, check the validity of the context template.
450 	 * It is very rare that the generation number mismatches, so it
451 	 * is acceptable to fail here and let the consumer recover by
452 	 * freeing this tmpl and creating a new one for the key and the
453 	 * new SW provider.
454 	 * Warning! This will need to change when multiple software
455 	 * providers per mechanism are supported.
456 	 */
457 
458 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
459 		if (encr_tmpl != NULL) {
460 			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
461 			    KCF_SUCCESS) {
462 				rv = CRYPTO_MECHANISM_INVALID;
463 				goto out;
464 			}
465 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
466 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
467 				rv = CRYPTO_OLD_CTX_TEMPLATE;
468 				goto out;
469 			}
470 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
471 		}
472 
473 		if (mac_tmpl != NULL) {
474 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
475 			    KCF_SUCCESS) {
476 				rv = CRYPTO_MECHANISM_INVALID;
477 				goto out;
478 			}
479 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
480 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
481 				rv = CRYPTO_OLD_CTX_TEMPLATE;
482 				goto out;
483 			}
484 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
485 		}
486 	}
487 
488 	ctx = kcf_new_ctx(cr, real_provider, sid);
489 	if (ctx == NULL) {
490 		rv = CRYPTO_HOST_MEMORY;
491 		goto out;
492 	}
493 	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
494 
495 	/* The fast path for SW providers. */
496 	if (CHECK_FASTPATH(cr, real_provider)) {
497 		crypto_mechanism_t lencr_mech;
498 		crypto_mechanism_t lmac_mech;
499 
500 		/* careful! struct assignments */
501 		lencr_mech = *encr_mech;
502 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
503 		    &lencr_mech);
504 
505 		lmac_mech = *mac_mech;
506 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
507 		    &lmac_mech);
508 
509 		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
510 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
511 		    KCF_SWFP_RHNDL(cr));
512 
513 		KCF_PROV_INCRSTATS(pd, rv);
514 	} else {
515 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
516 		    sid, encr_key, mac_key, NULL, NULL, NULL,
517 		    spi_encr_tmpl, spi_mac_tmpl);
518 
519 		cmops = &(params.rp_u.encrypt_mac_params);
520 
521 		/* careful! struct assignments */
522 		cmops->em_encr_mech = *encr_mech;
523 		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
524 		    &cmops->em_encr_mech);
525 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
526 
527 		cmops->em_mac_mech = *mac_mech;
528 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
529 		    &cmops->em_mac_mech);
530 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
531 
532 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
533 		    B_FALSE);
534 	}
535 
536 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
537 		KCF_CONTEXT_REFRELE(encr_kcf_context);
538 	} else
539 		*ctxp = (crypto_context_t)ctx;
540 
541 out:
542 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
543 		KCF_PROV_REFRELE(real_provider);
544 	return (rv);
545 }
546 
547 /*
548  * Starts a multi-part dual encrypt/mac operation. The provider and session
549  * to use are determined by the KCF dispatcher.
550  */
551 /* ARGSUSED */
552 int
553 crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
554     crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
555     crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
556     crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
557     crypto_call_req_t *cr)
558 {
559 	/*
560 	 * First try to find a provider for the encryption mechanism that
561 	 * is also capable of the MAC mechanism.
562 	 */
563 	int error;
564 	kcf_mech_entry_t *me;
565 	kcf_provider_desc_t *pd;
566 	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
567 	kcf_req_params_t params;
568 	kcf_encrypt_mac_ops_params_t *cmops;
569 	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
570 	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
571 	kcf_prov_tried_t *list = NULL;
572 	boolean_t encr_tmpl_checked = B_FALSE;
573 	boolean_t mac_tmpl_checked = B_FALSE;
574 	crypto_ctx_t *ctx = NULL;
575 	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
576 	crypto_call_flag_t save_flag;
577 
578 retry:
579 	/* pd is returned held on success */
580 	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
581 	    &me, &prov_encr_mechid,
582 	    &prov_mac_mechid, &error, list,
583 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC,
584 	    CHECK_RESTRICT(cr), 0);
585 	if (pd == NULL) {
586 		if (list != NULL)
587 			kcf_free_triedlist(list);
588 		return (error);
589 	}
590 
591 	/*
592 	 * For SW providers, check the validity of the context template.
593 	 * It is very rare that the generation number mismatches, so it
594 	 * is acceptable to fail here and let the consumer recover by
595 	 * freeing this tmpl and creating a new one for the key and the
596 	 * new SW provider.
597 	 * Warning! This will need to change when multiple software
598 	 * providers per mechanism are supported.
599 	 */
600 
601 	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
602 		if (encr_tmpl != NULL) {
603 			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
604 			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
605 
606 				if (list != NULL)
607 					kcf_free_triedlist(list);
608 				if (encr_kcf_context != NULL)
609 					KCF_CONTEXT_REFRELE(encr_kcf_context);
610 
611 				KCF_PROV_REFRELE(pd);
612 				/* Which one is the old one? */
613 				return (CRYPTO_OLD_CTX_TEMPLATE);
614 			}
615 			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
616 		}
617 		encr_tmpl_checked = B_TRUE;
618 	}
619 
620 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
621 		/* Need to emulate with 2 internal calls */
622 
623 		/*
624 		 * We avoid code complexity by limiting the pure async
625 		 * case to being handled only by a SW provider.
626 		 * XXX - Redo the emulation code below so that we can
627 		 * remove this limitation.
628 		 */
629 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
630 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
631 			    != NULL))
632 				goto retry;
633 			if (list != NULL)
634 				kcf_free_triedlist(list);
635 			if (encr_kcf_context != NULL)
636 				KCF_CONTEXT_REFRELE(encr_kcf_context);
637 			KCF_PROV_REFRELE(pd);
638 			return (CRYPTO_HOST_MEMORY);
639 		}
640 
641 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
642 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
643 			if (ctx == NULL) {
644 				if (list != NULL)
645 					kcf_free_triedlist(list);
646 				if (encr_kcf_context != NULL)
647 					KCF_CONTEXT_REFRELE(encr_kcf_context);
648 				KCF_PROV_REFRELE(pd);
649 				return (CRYPTO_HOST_MEMORY);
650 			}
651 			encr_kcf_context = (kcf_context_t *)
652 			    ctx->cc_framework_private;
653 		}
654 		/*
655 		 * Trade-off speed vs avoidance of code complexity and
656 		 * duplication:
657 		 * Could do all the combinations of fastpath / synch / asynch
658 		 * for the encryption and the mac steps. Early attempts
659 		 * showed the code grew wild and bug-prone, for little gain.
660 		 * Therefore, the adaptive asynch case is not implemented.
661 		 * It's either pure synchronous, or pure asynchronous.
662 		 * We still preserve a fastpath for the pure synchronous
663 		 * requests to SW providers.
664 		 */
665 		if (cr == NULL) {
666 			crypto_context_t mac_context;
667 
668 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
669 				crypto_mechanism_t lmech = *encr_mech;
670 
671 				lmech.cm_type = prov_encr_mechid;
672 
673 				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
674 				    encr_key, spi_encr_tmpl,
675 				    KCF_RHNDL(KM_SLEEP));
676 			} else {
677 				/*
678 				 * If we did the 'goto retry' then ctx may not
679 				 * be NULL.  In general, we can't reuse another
680 				 * provider's context, so we free it now so
681 				 * we don't leak it.
682 				 */
683 				if (ctx != NULL) {
684 					KCF_CONTEXT_REFRELE((kcf_context_t *)
685 					    ctx->cc_framework_private);
686 					encr_kcf_context = NULL;
687 				}
688 				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
689 				    encr_mech, encr_key, &encr_tmpl,
690 				    (crypto_context_t *)&ctx, NULL);
691 
692 				if (error == CRYPTO_SUCCESS) {
693 					encr_kcf_context = (kcf_context_t *)
694 					    ctx->cc_framework_private;
695 				}
696 			}
697 			KCF_PROV_INCRSTATS(pd, error);
698 
699 			KCF_PROV_REFRELE(pd);
700 
701 			if (error != CRYPTO_SUCCESS) {
702 				/* Can't be CRYPTO_QUEUED. return the failure */
703 				if (list != NULL)
704 					kcf_free_triedlist(list);
705 				if (encr_kcf_context != NULL)
706 					KCF_CONTEXT_REFRELE(encr_kcf_context);
707 
708 				return (error);
709 			}
710 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
711 			    &mac_context, NULL);
712 
713 			if (list != NULL)
714 				kcf_free_triedlist(list);
715 
716 			if (error != CRYPTO_SUCCESS) {
717 				/* Should this be an ASSERT() ? */
718 
719 				KCF_CONTEXT_REFRELE(encr_kcf_context);
720 			} else {
721 				encr_kcf_context = (kcf_context_t *)
722 				    ctx->cc_framework_private;
723 				mac_kcf_context = (kcf_context_t *)
724 				    ((crypto_ctx_t *)mac_context)->
725 				    cc_framework_private;
726 
727 				encr_kcf_context->kc_secondctx =
728 				    mac_kcf_context;
729 				KCF_CONTEXT_REFHOLD(mac_kcf_context);
730 
731 				*ctxp = (crypto_context_t)ctx;
732 			}
733 
734 			return (error);
735 		}
736 
737 		/* submit a pure asynchronous request. */
738 		save_flag = cr->cr_flag;
739 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
740 
741 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
742 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
743 		    spi_encr_tmpl, spi_mac_tmpl);
744 
745 		cmops = &(params.rp_u.encrypt_mac_params);
746 
747 		/* careful! struct assignments */
748 		cmops->em_encr_mech = *encr_mech;
749 		/*
750 		 * cmops->em_encr_mech.cm_type will be set when we get to
751 		 * kcf_emulate_dual() routine.
752 		 */
753 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
754 		cmops->em_mac_mech = *mac_mech;
755 
756 		/*
757 		 * cmops->em_mac_mech.cm_type will be set when we know the
758 		 * MAC provider.
759 		 */
760 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
761 
762 		/*
763 		 * non-NULL ctx->kc_secondctx tells common_submit_request
764 		 * that this request uses separate cipher and MAC contexts.
765 		 * That function will set ctx->kc_secondctx to the new
766 		 * MAC context, once it gets one.
767 		 */
768 		encr_kcf_context->kc_secondctx = encr_kcf_context;
769 
770 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
771 
772 		cr->cr_flag = save_flag;
773 
774 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
775 			KCF_CONTEXT_REFRELE(encr_kcf_context);
776 		}
777 		if (list != NULL)
778 			kcf_free_triedlist(list);
779 		*ctxp = (crypto_context_t)ctx;
780 		KCF_PROV_REFRELE(pd);
781 		return (error);
782 	}
783 
784 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
785 		if ((mac_tmpl != NULL) &&
786 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
787 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
788 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
789 
790 				if (list != NULL)
791 					kcf_free_triedlist(list);
792 
793 				KCF_PROV_REFRELE(pd);
794 				/* Which one is the old one? */
795 				return (CRYPTO_OLD_CTX_TEMPLATE);
796 			}
797 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
798 		}
799 		mac_tmpl_checked = B_TRUE;
800 	}
801 
802 	if (ctx == NULL) {
803 		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
804 		if (ctx == NULL) {
805 			if (list != NULL)
806 				kcf_free_triedlist(list);
807 
808 			KCF_PROV_REFRELE(pd);
809 			return (CRYPTO_HOST_MEMORY);
810 		}
811 		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
812 	}
813 
814 	/* The fast path for SW providers. */
815 	if (CHECK_FASTPATH(cr, pd)) {
816 		crypto_mechanism_t lencr_mech;
817 		crypto_mechanism_t lmac_mech;
818 
819 		/* careful! struct assignments */
820 		lencr_mech = *encr_mech;
821 		lencr_mech.cm_type = prov_encr_mechid;
822 		lmac_mech = *mac_mech;
823 		lmac_mech.cm_type = prov_mac_mechid;
824 
825 		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
826 		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
827 		    KCF_SWFP_RHNDL(cr));
828 
829 		KCF_PROV_INCRSTATS(pd, error);
830 	} else {
831 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
832 		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
833 		    spi_encr_tmpl, spi_mac_tmpl);
834 
835 		cmops = &(params.rp_u.encrypt_mac_params);
836 
837 		/* careful! struct assignments */
838 		cmops->em_encr_mech = *encr_mech;
839 		cmops->em_encr_mech.cm_type = prov_encr_mechid;
840 		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
841 		cmops->em_mac_mech = *mac_mech;
842 		cmops->em_mac_mech.cm_type = prov_mac_mechid;
843 		cmops->em_framework_mac_mechtype = mac_mech->cm_type;
844 
845 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
846 	}
847 
848 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
849 		if ((IS_RECOVERABLE(error)) &&
850 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
851 			goto retry;
852 
853 		KCF_CONTEXT_REFRELE(encr_kcf_context);
854 	} else
855 		*ctxp = (crypto_context_t)ctx;
856 
857 	if (list != NULL)
858 		kcf_free_triedlist(list);
859 
860 	KCF_PROV_REFRELE(pd);
861 	return (error);
862 }
863 
864 /*
865  * Continues a multi-part dual encrypt/mac operation.
866  */
867 /* ARGSUSED */
868 int
869 crypto_encrypt_mac_update(crypto_context_t context,
870     crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
871 {
872 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
873 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
874 	kcf_provider_desc_t *pd;
875 	int error;
876 	kcf_req_params_t params;
877 
878 	if ((ctx == NULL) ||
879 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
880 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
881 		return (CRYPTO_INVALID_CONTEXT);
882 	}
883 
884 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
885 
886 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
887 		off_t save_offset;
888 		size_t save_len;
889 		crypto_call_flag_t save_flag;
890 
891 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
892 			error = CRYPTO_INVALID_CONTEXT;
893 			goto out;
894 		}
895 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
896 
897 		/* First we submit the encryption request */
898 		if (cr == NULL) {
899 			/*
900 			 * 'ct' is never NULL.
901 			 * A NULL 'pt' means in-place.
902 			 */
903 			if (pt == NULL)
904 				error = crypto_encrypt_update(context,
905 				    (crypto_data_t *)ct, NULL, NULL);
906 			else
907 				error = crypto_encrypt_update(context, pt,
908 				    (crypto_data_t *)ct, NULL);
909 
910 			if (error != CRYPTO_SUCCESS)
911 				goto out;
912 
913 			/*
914 			 * Call mac_update when there is data to throw into
915 			 * the mix: either an explicitly non-zero ct->dd_len2,
916 			 * or the last ciphertext portion.
917 			 */
918 			save_offset = ct->dd_offset1;
919 			save_len = ct->dd_len1;
920 			if (ct->dd_len2 == 0) {
921 				/*
922 				 * The previous encrypt step was an
923 				 * accumulation only and didn't produce any
924 				 * partial output
925 				 */
926 				if (ct->dd_len1 == 0)
927 					goto out;
928 			} else {
929 				ct->dd_offset1 = ct->dd_offset2;
930 				ct->dd_len1 = ct->dd_len2;
931 			}
932 			error = crypto_mac_update((crypto_context_t)mac_ctx,
933 			    (crypto_data_t *)ct, NULL);
934 
935 			ct->dd_offset1 = save_offset;
936 			ct->dd_len1 = save_len;
937 
938 			goto out;
939 		}
940 		/* submit a pure asynchronous request. */
941 		save_flag = cr->cr_flag;
942 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
943 
944 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
945 		    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL);
946 
947 
948 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
949 
950 		cr->cr_flag = save_flag;
951 		goto out;
952 	}
953 
954 	/* The fast path for SW providers. */
955 	if (CHECK_FASTPATH(cr, pd)) {
956 		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
957 		KCF_PROV_INCRSTATS(pd, error);
958 	} else {
959 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
960 		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);
961 
962 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
963 	}
964 out:
965 	return (error);
966 }
967 
968 /*
969  * Terminates a multi-part dual encrypt/mac operation.
970  */
971 /* ARGSUSED */
972 int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
973     crypto_data_t *mac, crypto_call_req_t *cr)
974 {
975 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
976 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
977 	kcf_provider_desc_t *pd;
978 	int error;
979 	kcf_req_params_t params;
980 
981 	if ((ctx == NULL) ||
982 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
983 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
984 		return (CRYPTO_INVALID_CONTEXT);
985 	}
986 
987 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
988 
989 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
990 		off_t save_offset;
991 		size_t save_len;
992 		crypto_context_t mac_context;
993 		crypto_call_flag_t save_flag;
994 
995 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
996 			return (CRYPTO_INVALID_CONTEXT);
997 		}
998 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
999 		mac_context = (crypto_context_t)mac_ctx;
1000 
1001 		if (cr == NULL) {
1002 			/* Get the last chunk of ciphertext */
1003 			error = crypto_encrypt_final(context,
1004 			    (crypto_data_t *)ct, NULL);
1005 
1006 			if (error != CRYPTO_SUCCESS)  {
1007 				/*
1008 				 * Needed here, because the caller of
1009 				 * crypto_encrypt_mac_final() lost all
1010 				 * refs to the mac_ctx.
1011 				 */
1012 				crypto_cancel_ctx(mac_context);
1013 				return (error);
1014 			}
1015 			if (ct->dd_len2 > 0) {
1016 				save_offset = ct->dd_offset1;
1017 				save_len = ct->dd_len1;
1018 				ct->dd_offset1 = ct->dd_offset2;
1019 				ct->dd_len1 = ct->dd_len2;
1020 
1021 				error = crypto_mac_update(mac_context,
1022 				    (crypto_data_t *)ct, NULL);
1023 
1024 				ct->dd_offset1 = save_offset;
1025 				ct->dd_len1 = save_len;
1026 
1027 				if (error != CRYPTO_SUCCESS)  {
1028 					crypto_cancel_ctx(mac_context);
1029 					return (error);
1030 				}
1031 			}
1032 
1033 			/* and finally, collect the MAC */
1034 			error = crypto_mac_final(mac_context, mac, NULL);
1035 
1036 			return (error);
1037 		}
1038 		/* submit a pure asynchronous request. */
1039 		save_flag = cr->cr_flag;
1040 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1041 
1042 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1043 		    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL);
1044 
1045 
1046 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1047 
1048 		cr->cr_flag = save_flag;
1049 		return (error);
1050 	}
1051 	/* The fast path for SW providers. */
1052 	if (CHECK_FASTPATH(cr, pd)) {
1053 		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
1054 		KCF_PROV_INCRSTATS(pd, error);
1055 	} else {
1056 		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
1057 		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);
1058 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1059 	}
1060 out:
1061 	/* Release the hold done in kcf_new_ctx() during init step. */
1062 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
1063 	return (error);
1064 }
1065 
1066 /*
1067  * Performs an atomic dual mac/decrypt operation. The provider to use
1068  * is determined by the KCF dispatcher.
1069  */
1070 int
1071 crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
1072     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1073     crypto_key_t *mac_key, crypto_key_t *decr_key,
1074     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1075     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1076 {
1077 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1078 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
1079 }
1080 
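/*
 * Performs an atomic dual mac/decrypt operation with the provider and
 * session specified by the caller.
 */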
1081 int
1082 crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
1083     crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
1084     crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
1085     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1086     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1087 {
1088 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1089 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1090 	    crq, B_FALSE));
1091 }
1092 
1093 /*
1094  * Performs an atomic dual mac/decrypt operation. The provider to use
1095  * is determined by the KCF dispatcher. 'mac' specifies the expected
1096  * value for the MAC. The decryption is not performed if the computed
1097  * MAC does not match the expected MAC.
1098  */
1099 int
1100 crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
1101     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1102     crypto_key_t *mac_key, crypto_key_t *decr_key,
1103     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1104     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1105 {
1106 	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
1107 	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
1108 }
1109 
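/*
 * Same as crypto_mac_verify_decrypt(), except that the provider and
 * session to use are specified by the caller.
 */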
1110 int
1111 crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
1112     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1113     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1114     crypto_key_t *mac_key, crypto_key_t *decr_key,
1115     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1116     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
1117 {
1118 	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
1119 	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
1120 	    crq, B_TRUE));
1121 }
1122 
1123 /*
1124  * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
1125  * When do_verify is set, the MAC is verified before the decryption step.
1126  */
1127 static int
1128 crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
1129     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1130     crypto_key_t *mac_key, crypto_key_t *decr_key,
1131     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1132     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1133     boolean_t do_verify)
1134 {
1135 	/*
1136 	 * First try to find a provider for the decryption mechanism that
1137 	 * is also capable of the MAC mechanism.
1138 	 * We still favor optimizing the costlier decryption.
1139 	 */
1140 	int error;
1141 	kcf_mech_entry_t *me;
1142 	kcf_provider_desc_t *pd;
1143 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1144 	kcf_req_params_t params;
1145 	kcf_mac_decrypt_ops_params_t *cmops;
1146 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1147 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1148 	kcf_prov_tried_t *list = NULL;
1149 	boolean_t decr_tmpl_checked = B_FALSE;
1150 	boolean_t mac_tmpl_checked = B_FALSE;
1151 	kcf_dual_req_t *next_req = NULL;
1152 	crypto_call_req_t mac_req, *mac_reqp = NULL;
1153 
1154 retry:
1155 	/* pd is returned held on success */
1156 	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
1157 	    &me, &prov_decr_mechid,
1158 	    &prov_mac_mechid, &error, list,
1159 	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1160 	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
1161 	    CHECK_RESTRICT(crq), ct->dd_len2);
1162 	if (pd == NULL) {
1163 		if (list != NULL)
1164 			kcf_free_triedlist(list);
1165 		if (next_req != NULL)
1166 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1167 		return (CRYPTO_MECH_NOT_SUPPORTED);
1168 	}
1169 
1170 	/*
1171 	 * For SW providers, check the validity of the context template.
1172 	 * It is very rare that the generation number mismatches, so it
1173 	 * is acceptable to fail here and let the consumer recover by
1174 	 * freeing this tmpl and creating a new one for the key and the
1175 	 * new SW provider.
1176 	 */
1177 
1178 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1179 		if (decr_tmpl != NULL) {
1180 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1181 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1182 				if (next_req != NULL)
1183 					kmem_free(next_req,
1184 					    sizeof (kcf_dual_req_t));
1185 				if (list != NULL)
1186 					kcf_free_triedlist(list);
1187 				KCF_PROV_REFRELE(pd);
1188 
1189 				/* Which one is the old one? */
1190 				return (CRYPTO_OLD_CTX_TEMPLATE);
1191 			}
1192 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1193 		}
1194 		decr_tmpl_checked = B_TRUE;
1195 	}
1196 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1197 		/* Need to emulate with 2 internal calls */
1198 
1199 		/* Prepare the call_req to be submitted for the MAC step */
1200 
1201 		if (crq != NULL) {
1202 
1203 			if (next_req == NULL) {
1204 				/*
1205 				 * allocate, initialize and prepare the
1206 				 * params for the next step only in the
1207 				 * first pass (not on every retry).
1208 				 */
1209 				next_req = kcf_alloc_req(crq);
1210 
1211 				if (next_req == NULL) {
1212 					KCF_PROV_REFRELE(pd);
1213 					if (list != NULL)
1214 						kcf_free_triedlist(list);
1215 					return (CRYPTO_HOST_MEMORY);
1216 				}
1217 				KCF_WRAP_DECRYPT_OPS_PARAMS(
1218 				    &(next_req->kr_params), KCF_OP_ATOMIC,
1219 				    NULL, decr_mech, decr_key,
1220 				    (crypto_data_t *)ct, pt, spi_decr_tmpl);
1221 			}
1222 
1223 			mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
1224 			mac_req.cr_flag |= CRYPTO_SETDUAL;
1225 			mac_req.cr_callback_func = kcf_next_req;
1226 			mac_req.cr_callback_arg = next_req;
1227 			mac_reqp = &mac_req;
1228 		}
1229 
1230 		/* 'pd' is the decryption provider. */
1231 
1232 		if (do_verify)
1233 			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
1234 			    mac_key, mac_tmpl, mac,
1235 			    (crq == NULL) ? NULL : mac_reqp);
1236 		else
1237 			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
1238 			    mac_key, mac_tmpl, mac,
1239 			    (crq == NULL) ? NULL : mac_reqp);
1240 
1241 		switch (error) {
1242 		case CRYPTO_SUCCESS: {
1243 			off_t saveoffset;
1244 			size_t savelen;
1245 
1246 			if (next_req == NULL) {
1247 				saveoffset = ct->dd_offset1;
1248 				savelen = ct->dd_len1;
1249 			} else {
1250 				saveoffset = next_req->kr_saveoffset =
1251 				    ct->dd_offset1;
1252 				savelen = next_req->kr_savelen = ct->dd_len1;
1253 
1254 				ASSERT(mac_reqp != NULL);
1255 				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
1256 				mac_req.cr_callback_func = kcf_last_req;
1257 			}
1258 			ct->dd_offset1 = ct->dd_offset2;
1259 			ct->dd_len1 = ct->dd_len2;
1260 
1261 			if (CHECK_FASTPATH(crq, pd)) {
1262 				crypto_mechanism_t lmech;
1263 
1264 				lmech = *decr_mech;
1265 				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
1266 				    pd, &lmech);
1267 
1268 				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
1269 				    &lmech, decr_key, (crypto_data_t *)ct,
1270 				    (crypto_data_t *)pt, spi_decr_tmpl,
1271 				    KCF_SWFP_RHNDL(mac_reqp));
1272 
1273 				KCF_PROV_INCRSTATS(pd, error);
1274 			} else {
1275 				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
1276 				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
1277 				    decr_key, (crypto_data_t *)ct, pt,
1278 				    spi_decr_tmpl);
1279 
1280 				error = kcf_submit_request(pd, NULL,
1281 				    (crq == NULL) ? NULL : mac_reqp,
1282 				    &params, B_FALSE);
1283 			}
1284 			if (error != CRYPTO_QUEUED) {
1285 				KCF_PROV_INCRSTATS(pd, error);
1286 				ct->dd_offset1 = saveoffset;
1287 				ct->dd_len1 = savelen;
1288 			}
1289 			break;
1290 		}
1291 
1292 		case CRYPTO_QUEUED:
1293 			if ((crq != NULL) && !(crq->cr_flag & CRYPTO_SKIP_REQID))
1294 				crq->cr_reqid = mac_req.cr_reqid;
1295 			break;
1296 
1297 		default:
1298 			if (IS_RECOVERABLE(error)) {
1299 				if (kcf_insert_triedlist(&list, pd,
1300 				    KCF_KMFLAG(crq)) != NULL)
1301 					goto retry;
1302 			}
1303 		}
1304 		if (error != CRYPTO_QUEUED && next_req != NULL)
1305 			kmem_free(next_req, sizeof (kcf_dual_req_t));
1306 		if (list != NULL)
1307 			kcf_free_triedlist(list);
1308 		KCF_PROV_REFRELE(pd);
1309 		return (error);
1310 	}
1311 
1312 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1313 		if ((mac_tmpl != NULL) &&
1314 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1315 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1316 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1317 				if (next_req != NULL)
1318 					kmem_free(next_req,
1319 					    sizeof (kcf_dual_req_t));
1320 				if (list != NULL)
1321 					kcf_free_triedlist(list);
1322 				KCF_PROV_REFRELE(pd);
1323 
1324 				/* Which one is the old one? */
1325 				return (CRYPTO_OLD_CTX_TEMPLATE);
1326 			}
1327 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1328 		}
1329 		mac_tmpl_checked = B_TRUE;
1330 	}
1331 
1332 	/* The fast path for SW providers. */
1333 	if (CHECK_FASTPATH(crq, pd)) {
1334 		crypto_mechanism_t lmac_mech;
1335 		crypto_mechanism_t ldecr_mech;
1336 
1337 		/* careful! struct assignments */
1338 		ldecr_mech = *decr_mech;
1339 		ldecr_mech.cm_type = prov_decr_mechid;
1340 		lmac_mech = *mac_mech;
1341 		lmac_mech.cm_type = prov_mac_mechid;
1342 
1343 		if (do_verify)
1344 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
1345 			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
1346 			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1347 			    KCF_SWFP_RHNDL(crq));
1348 		else
1349 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
1350 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1351 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1352 			    KCF_SWFP_RHNDL(crq));
1353 
1354 		KCF_PROV_INCRSTATS(pd, error);
1355 	} else {
1356 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1357 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1358 		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
1359 		    spi_mac_tmpl, spi_decr_tmpl);
1360 
1361 		cmops = &(params.rp_u.mac_decrypt_params);
1362 
1363 		/* careful! struct assignments */
1364 		cmops->md_decr_mech = *decr_mech;
1365 		cmops->md_decr_mech.cm_type = prov_decr_mechid;
1366 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1367 		cmops->md_mac_mech = *mac_mech;
1368 		cmops->md_mac_mech.cm_type = prov_mac_mechid;
1369 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1370 
1371 		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
1372 	}
1373 
1374 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
1375 	    IS_RECOVERABLE(error)) {
1376 		/* Add pd to the linked list of providers tried. */
1377 		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
1378 			goto retry;
1379 	}
1380 
1381 	if (list != NULL)
1382 		kcf_free_triedlist(list);
1383 
1384 	if (next_req != NULL)
1385 		kmem_free(next_req, sizeof (kcf_dual_req_t));
1386 	KCF_PROV_REFRELE(pd);
1387 	return (error);
1388 }
1389 
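/*
 * Single-provider version of crypto_mac_decrypt_common(). Called by
 * crypto_mac_decrypt_prov() and crypto_mac_verify_decrypt_prov() with
 * the provider and session specified by the caller.
 */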
1390 static int
1391 crypto_mac_decrypt_common_prov(crypto_provider_t provider,
1392     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1393     crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
1394     crypto_key_t *mac_key, crypto_key_t *decr_key,
1395     crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
1396     crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
1397     boolean_t do_verify)
1398 {
1399 	/*
1400 	 * First try to find a provider for the decryption mechanism that
1401 	 * is also capable of the MAC mechanism.
1402 	 * We still favor optimizing the costlier decryption.
1403 	 */
1404 	int error;
1405 	kcf_mech_entry_t *me;
1406 	kcf_provider_desc_t *pd = provider;
1407 	kcf_provider_desc_t *real_provider = pd;
1408 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1409 	kcf_req_params_t params;
1410 	kcf_mac_decrypt_ops_params_t *cmops;
1411 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1412 
1413 	ASSERT(KCF_PROV_REFHELD(pd));
1414 
1415 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1416 		error = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
1417 		    mac_mech->cm_type, mac_key, CHECK_RESTRICT(crq), pd,
1418 		    &real_provider, CRYPTO_FG_MAC_DECRYPT_ATOMIC);
1419 
1420 		if (error != CRYPTO_SUCCESS)
1421 			return (error);
1422 	}
1423 
1424 	/*
1425 	 * For SW providers, check the validity of the context template.
1426 	 * It is very rare that the generation number mismatches, so it
1427 	 * is acceptable to fail here and let the consumer recover by
1428 	 * freeing this tmpl and creating a new one for the key and the
1429 	 * new SW provider.
1430 	 */
1431 
1432 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1433 		if (decr_tmpl != NULL) {
1434 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1435 			    KCF_SUCCESS) {
1436 				error = CRYPTO_MECHANISM_INVALID;
1437 				goto out;
1438 			}
1439 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1440 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1441 				error = CRYPTO_OLD_CTX_TEMPLATE;
1442 				goto out;
1443 			}
1444 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1445 		}
1446 
1447 		if (mac_tmpl != NULL) {
1448 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1449 			    KCF_SUCCESS) {
1450 				error = CRYPTO_MECHANISM_INVALID;
1451 				goto out;
1452 			}
1453 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1454 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1455 				error = CRYPTO_OLD_CTX_TEMPLATE;
1456 				goto out;
1457 			}
1458 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1459 		}
1460 	}
1461 
1462 	/* The fast path for SW providers. */
1463 	if (CHECK_FASTPATH(crq, pd)) {
1464 		crypto_mechanism_t lmac_mech;
1465 		crypto_mechanism_t ldecr_mech;
1466 
1467 		/* careful! struct assignments */
1468 		ldecr_mech = *decr_mech;
1469 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1470 		    &ldecr_mech);
1471 
1472 		lmac_mech = *mac_mech;
1473 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1474 		    &lmac_mech);
1475 
1476 		if (do_verify)
1477 			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
1478 			    real_provider, sid, &lmac_mech, mac_key,
1479 			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
1480 			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
1481 		else
1482 			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
1483 			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
1484 			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
1485 			    KCF_SWFP_RHNDL(crq));
1486 
1487 		KCF_PROV_INCRSTATS(pd, error);
1488 	} else {
1489 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
1490 		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
1491 		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
1492 		    spi_mac_tmpl, spi_decr_tmpl);
1493 
1494 		cmops = &(params.rp_u.mac_decrypt_params);
1495 
1496 		/* careful! struct assignments */
1497 		cmops->md_decr_mech = *decr_mech;
1498 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1499 		    &cmops->md_decr_mech);
1500 		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
1501 
1502 		cmops->md_mac_mech = *mac_mech;
1503 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1504 		    &cmops->md_mac_mech);
1505 		cmops->md_framework_mac_mechtype = mac_mech->cm_type;
1506 
1507 		error = kcf_submit_request(real_provider, NULL, crq, &params,
1508 		    B_FALSE);
1509 	}
1510 
1511 out:
1512 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1513 		KCF_PROV_REFRELE(real_provider);
1514 	return (error);
1515 }
1516 
1517 /*
1518  * Starts a multi-part dual mac/decrypt operation. The provider to
1519  * use is determined by the KCF dispatcher.
1520  */
1521 /* ARGSUSED */
1522 int
1523 crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
1524     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1525     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1526     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1527     crypto_call_req_t *cr)
1528 {
1529 	/*
1530 	 * First try to find a provider for the decryption mechanism that
1531 	 * is also capable of the MAC mechanism.
1532 	 * We still favor optimizing the costlier decryption.
1533 	 */
1534 	int error;
1535 	kcf_mech_entry_t *me;
1536 	kcf_provider_desc_t *pd;
1537 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1538 	kcf_req_params_t params;
1539 	kcf_mac_decrypt_ops_params_t *mdops;
1540 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1541 	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
1542 	kcf_prov_tried_t *list = NULL;
1543 	boolean_t decr_tmpl_checked = B_FALSE;
1544 	boolean_t mac_tmpl_checked = B_FALSE;
1545 	crypto_ctx_t *ctx = NULL;
1546 	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
1547 	crypto_call_flag_t save_flag;
1548 
1549 retry:
1550 	/* pd is returned held on success */
1551 	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
1552 	    &me, &prov_decr_mechid,
1553 	    &prov_mac_mechid, &error, list,
1554 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC,
1555 	    CHECK_RESTRICT(cr), 0);
1556 	if (pd == NULL) {
1557 		if (list != NULL)
1558 			kcf_free_triedlist(list);
1559 		return (error);
1560 	}
1561 
1562 	/*
1563 	 * For SW providers, check the validity of the context template.
1564 	 * It is very rare that the generation number mismatches, so it
1565 	 * is acceptable to fail here and let the consumer recover by
1566 	 * freeing this tmpl and creating a new one for the key and the
1567 	 * new SW provider.
1568 	 * Warning! This will need to change when multiple software
1569 	 * providers per mechanism are supported.
1570 	 */
1571 
1572 	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1573 		if (decr_tmpl != NULL) {
1574 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1575 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1576 
1577 				if (list != NULL)
1578 					kcf_free_triedlist(list);
1579 				if (decr_kcf_context != NULL)
1580 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1581 
1582 				KCF_PROV_REFRELE(pd);
1583 				/* Which one is the old one? */
1584 				return (CRYPTO_OLD_CTX_TEMPLATE);
1585 			}
1586 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1587 		}
1588 		decr_tmpl_checked = B_TRUE;
1589 	}
1590 
1591 	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
1592 		/* Need to emulate with 2 internal calls */
1593 
1594 		/*
1595 		 * We avoid code complexity by limiting the pure async
1596 		 * case to being handled only by a SW provider.
1597 		 * XXX - Redo the emulation code below so that we can
1598 		 * remove this limitation.
1599 		 */
1600 		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
1601 			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))
1602 			    != NULL))
1603 				goto retry;
1604 			if (list != NULL)
1605 				kcf_free_triedlist(list);
1606 			if (decr_kcf_context != NULL)
1607 				KCF_CONTEXT_REFRELE(decr_kcf_context);
1608 			KCF_PROV_REFRELE(pd);
1609 			return (CRYPTO_HOST_MEMORY);
1610 		}
1611 
1612 		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1613 			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
1614 			if (ctx == NULL) {
1615 				if (list != NULL)
1616 					kcf_free_triedlist(list);
1617 				if (decr_kcf_context != NULL)
1618 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1619 				KCF_PROV_REFRELE(pd);
1620 				return (CRYPTO_HOST_MEMORY);
1621 			}
1622 			decr_kcf_context = (kcf_context_t *)
1623 			    ctx->cc_framework_private;
1624 		}
1625 		/*
1626 		 * Trade-off speed vs avoidance of code complexity and
1627 		 * duplication:
1628 		 * Could do all the combinations of fastpath / synch / asynch
1629 		 * for the decryption and the mac steps. Early attempts
1630 		 * showed the code grew wild and bug-prone, for little gain.
1631 		 * Therefore, the adaptive asynch case is not implemented.
1632 		 * It's either pure synchronous, or pure asynchronous.
1633 		 * We still preserve a fastpath for the pure synchronous
1634 		 * requests to SW providers.
1635 		 */
1636 		if (cr == NULL) {
1637 			crypto_context_t mac_context;
1638 
1639 			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
1640 			    &mac_context, NULL);
1641 
1642 			if (error != CRYPTO_SUCCESS) {
1643 				/* Can't be CRYPTO_QUEUED. return the failure */
1644 				if (list != NULL)
1645 					kcf_free_triedlist(list);
1646 
1647 				if (decr_kcf_context != NULL)
1648 					KCF_CONTEXT_REFRELE(decr_kcf_context);
1649 				return (error);
1650 			}
1651 			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
1652 				crypto_mechanism_t lmech = *decr_mech;
1653 
1654 				lmech.cm_type = prov_decr_mechid;
1655 
1656 				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
1657 				    decr_key, spi_decr_tmpl,
1658 				    KCF_RHNDL(KM_SLEEP));
1659 			} else {
1660 				/*
1661 				 * If we did the 'goto retry' then ctx may not
1662 				 * be NULL.  In general, we can't reuse another
1663 				 * provider's context, so we free it now so
1664 				 * we don't leak it.
1665 				 */
1666 				if (ctx != NULL) {
1667 					KCF_CONTEXT_REFRELE((kcf_context_t *)
1668 					    ctx->cc_framework_private);
1669 					decr_kcf_context = NULL;
1670 				}
1671 				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
1672 				    decr_mech, decr_key, &decr_tmpl,
1673 				    (crypto_context_t *)&ctx, NULL);
1674 
1675 				if (error == CRYPTO_SUCCESS) {
1676 					decr_kcf_context = (kcf_context_t *)
1677 					    ctx->cc_framework_private;
1678 				}
1679 			}
1680 
1681 			KCF_PROV_INCRSTATS(pd, error);
1682 
1683 			KCF_PROV_REFRELE(pd);
1684 
1685 			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED; return the failure */
1687 				if (list != NULL)
1688 					kcf_free_triedlist(list);
1689 				if (mac_kcf_context != NULL)
1690 					KCF_CONTEXT_REFRELE(mac_kcf_context);
1691 
1692 				return (error);
1693 			}
1694 			mac_kcf_context = (kcf_context_t *)
1695 			    ((crypto_ctx_t *)mac_context)->
1696 			    cc_framework_private;
1697 
1698 			decr_kcf_context = (kcf_context_t *)
1699 			    ctx->cc_framework_private;
1700 
			/*
			 * Here too, the MAC context is the secondary one,
			 * so the callback case cannot overwrite the context
			 * returned to the caller.
			 */
1706 			decr_kcf_context->kc_secondctx = mac_kcf_context;
1707 			KCF_CONTEXT_REFHOLD(mac_kcf_context);
1708 
1709 			*ctxp = (crypto_context_t)ctx;
1710 
1711 			return (error);
1712 		}
1713 		/* submit a pure asynchronous request. */
1714 		save_flag = cr->cr_flag;
1715 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
1716 
1717 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1718 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1719 		    spi_mac_tmpl, spi_decr_tmpl);
1720 
1721 		mdops = &(params.rp_u.mac_decrypt_params);
1722 
		/* Careful! struct assignments (full copies) */
		mdops->md_decr_mech = *decr_mech;
		/*
		 * mdops->md_decr_mech.cm_type will be set when we get to
		 * the kcf_emulate_dual() routine.
		 */
1729 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1730 		mdops->md_mac_mech = *mac_mech;
1731 
1732 		/*
1733 		 * mdops->md_mac_mech.cm_type will be set when we know the
1734 		 * MAC provider.
1735 		 */
1736 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1737 
1738 		/*
1739 		 * non-NULL ctx->kc_secondctx tells common_submit_request
1740 		 * that this request uses separate cipher and MAC contexts.
1741 		 * That function will set the MAC context's kc_secondctx to
1742 		 * this decrypt context.
1743 		 */
1744 		decr_kcf_context->kc_secondctx = decr_kcf_context;
1745 
1746 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1747 
1748 		cr->cr_flag = save_flag;
1749 
1750 		if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1751 			KCF_CONTEXT_REFRELE(decr_kcf_context);
1752 		}
1753 		if (list != NULL)
1754 			kcf_free_triedlist(list);
		*ctxp = (crypto_context_t)ctx;
1756 		KCF_PROV_REFRELE(pd);
1757 		return (error);
1758 	}
1759 
1760 	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
1761 		if ((mac_tmpl != NULL) &&
1762 		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
1763 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1764 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1765 
1766 				if (list != NULL)
1767 					kcf_free_triedlist(list);
1768 
1769 				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
1771 				return (CRYPTO_OLD_CTX_TEMPLATE);
1772 			}
1773 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1774 		}
1775 		mac_tmpl_checked = B_TRUE;
1776 	}
1777 
	if (ctx == NULL) {
		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
		if (ctx == NULL) {
			if (list != NULL)
				kcf_free_triedlist(list);
			/* Drop the provider hold, as other error paths do */
			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}
		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
	}
1788 
1789 	/* The fast path for SW providers. */
1790 	if (CHECK_FASTPATH(cr, pd)) {
1791 		crypto_mechanism_t ldecr_mech;
1792 		crypto_mechanism_t lmac_mech;
1793 
		/* Careful! struct assignments (full copies) */
1795 		ldecr_mech = *decr_mech;
1796 		ldecr_mech.cm_type = prov_decr_mechid;
1797 		lmac_mech = *mac_mech;
1798 		lmac_mech.cm_type = prov_mac_mechid;
1799 
1800 		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
1801 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1802 		    KCF_SWFP_RHNDL(cr));
1803 
1804 		KCF_PROV_INCRSTATS(pd, error);
1805 	} else {
1806 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1807 		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
1808 		    spi_mac_tmpl, spi_decr_tmpl);
1809 
1810 		mdops = &(params.rp_u.mac_decrypt_params);
1811 
		/* Careful! struct assignments (full copies) */
1813 		mdops->md_decr_mech = *decr_mech;
1814 		mdops->md_decr_mech.cm_type = prov_decr_mechid;
1815 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1816 		mdops->md_mac_mech = *mac_mech;
1817 		mdops->md_mac_mech.cm_type = prov_mac_mechid;
1818 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1819 
1820 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
1821 	}
1822 
1823 	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
1824 		if ((IS_RECOVERABLE(error)) &&
1825 		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
1826 			goto retry;
1827 
1828 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1829 	} else
1830 		*ctxp = (crypto_context_t)ctx;
1831 
1832 	if (list != NULL)
1833 		kcf_free_triedlist(list);
1834 
1835 	KCF_PROV_REFRELE(pd);
1836 	return (error);
1837 }
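
/*
 * Summary of the context chaining set up above when no single provider
 * supports both mechanisms and the dual operation is emulated with two
 * internal single operations:
 *
 *	synchronous case:
 *		decr_kcf_context->kc_secondctx = mac_kcf_context;
 *	pure asynchronous case (self-reference used as a sentinel until the
 *	framework picks the MAC provider):
 *		decr_kcf_context->kc_secondctx = decr_kcf_context;
 *
 * crypto_mac_decrypt_update() and crypto_mac_decrypt_final() below treat a
 * non-NULL kc_secondctx as the sign of an emulated dual operation.
 */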
1838 
1839 int
1840 crypto_mac_decrypt_init_prov(crypto_provider_t provider,
1841     crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
1842     crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
1843     crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
1844     crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
1845     crypto_call_req_t *cr)
1846 {
	/*
	 * First try to find a provider for the decryption mechanism that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
1852 	int rv;
1853 	kcf_mech_entry_t *me;
1854 	kcf_provider_desc_t *pd = provider;
1855 	kcf_provider_desc_t *real_provider = pd;
1856 	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
1857 	kcf_req_params_t params;
1858 	kcf_mac_decrypt_ops_params_t *mdops;
1859 	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
1860 	crypto_ctx_t *ctx;
1861 	kcf_context_t *decr_kcf_context = NULL;
1862 
1863 	ASSERT(KCF_PROV_REFHELD(pd));
1864 
1865 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
1866 		rv = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
1867 		    mac_mech->cm_type, mac_key, CHECK_RESTRICT(cr), pd,
1868 		    &real_provider, CRYPTO_FG_MAC_DECRYPT);
1869 
1870 		if (rv != CRYPTO_SUCCESS)
1871 			return (rv);
1872 	}
1873 
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mismatches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this template and creating a new one for the key and
	 * the new SW provider.
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
1883 
1884 	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
1885 		if (decr_tmpl != NULL) {
1886 			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
1887 			    KCF_SUCCESS) {
1888 				rv = CRYPTO_MECHANISM_INVALID;
1889 				goto out;
1890 			}
1891 			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
1892 			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
1893 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1894 				goto out;
1895 			}
1896 			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
1897 		}
1898 
1899 		if (mac_tmpl != NULL) {
1900 			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
1901 			    KCF_SUCCESS) {
1902 				rv = CRYPTO_MECHANISM_INVALID;
1903 				goto out;
1904 			}
1905 			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
1906 			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
1907 				rv = CRYPTO_OLD_CTX_TEMPLATE;
1908 				goto out;
1909 			}
1910 			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
1911 		}
1912 	}
1913 
1914 	ctx = kcf_new_ctx(cr, real_provider, sid);
1915 	if (ctx == NULL) {
1916 		rv = CRYPTO_HOST_MEMORY;
1917 		goto out;
1918 	}
1919 	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
1920 
1921 	/* The fast path for SW providers. */
1922 	if (CHECK_FASTPATH(cr, pd)) {
1923 		crypto_mechanism_t ldecr_mech;
1924 		crypto_mechanism_t lmac_mech;
1925 
		/* Careful! struct assignments (full copies) */
1927 		ldecr_mech = *decr_mech;
1928 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1929 		    &ldecr_mech);
1930 
1931 		lmac_mech = *mac_mech;
1932 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1933 		    &lmac_mech);
1934 
1935 		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
1936 		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
1937 		    KCF_SWFP_RHNDL(cr));
1938 
1939 		KCF_PROV_INCRSTATS(pd, rv);
1940 	} else {
1941 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
1942 		    sid, mac_key, decr_key, NULL, NULL, NULL,
1943 		    spi_mac_tmpl, spi_decr_tmpl);
1944 
1945 		mdops = &(params.rp_u.mac_decrypt_params);
1946 
		/* Careful! struct assignments (full copies) */
1948 		mdops->md_decr_mech = *decr_mech;
1949 		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
1950 		    &mdops->md_decr_mech);
1951 		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
1952 
1953 		mdops->md_mac_mech = *mac_mech;
1954 		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
1955 		    &mdops->md_mac_mech);
1956 		mdops->md_framework_mac_mechtype = mac_mech->cm_type;
1957 
1958 		rv = kcf_submit_request(real_provider, ctx, cr, &params,
1959 		    B_FALSE);
1960 	}
1961 
1962 	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
1963 		KCF_CONTEXT_REFRELE(decr_kcf_context);
1964 	} else
1965 		*ctxp = (crypto_context_t)ctx;
1966 
1967 out:
1968 	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
1969 		KCF_PROV_REFRELE(real_provider);
1970 	return (rv);
1971 }
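
/*
 * Illustrative sketch (not compiled in) of a synchronous consumer driving
 * the multi-part dual MAC/decrypt interface above. The variable names, the
 * key/mechanism setup, and the data descriptors are hypothetical, and error
 * handling is mostly omitted:
 *
 *	crypto_context_t dual_ctx;
 *	int rv;
 *
 *	rv = crypto_mac_decrypt_init_prov(prov, sid, &mac_mech, &decr_mech,
 *	    &mac_key, &decr_key, mac_tmpl, decr_tmpl, &dual_ctx, NULL);
 *	if (rv != CRYPTO_SUCCESS)
 *		return (rv);
 *
 *	rv = crypto_mac_decrypt_update(dual_ctx, &ct, &pt, NULL);
 *	...
 *	rv = crypto_mac_decrypt_final(dual_ctx, &mac, &last_pt, NULL);
 *
 * Passing a NULL crypto_call_req_t makes each step fully synchronous.
 */
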
1972 /*
1973  * Continues a multi-part dual mac/decrypt operation.
1974  */
1975 /* ARGSUSED */
1976 int
1977 crypto_mac_decrypt_update(crypto_context_t context,
1978     crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
1979 {
1980 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
1981 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
1982 	kcf_provider_desc_t *pd;
1983 	int error;
1984 	kcf_req_params_t params;
1985 
1986 	if ((ctx == NULL) ||
1987 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
1988 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
1989 		return (CRYPTO_INVALID_CONTEXT);
1990 	}
1991 
1992 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
1993 
1994 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
1995 		off_t save_offset;
1996 		size_t save_len;
1997 		crypto_call_flag_t save_flag;
1998 
1999 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2000 			error = CRYPTO_INVALID_CONTEXT;
2001 			goto out;
2002 		}
2003 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2004 
2005 		/* First we submit the MAC request */
2006 		if (cr == NULL) {
			/*
			 * 'ct' is never NULL.
			 */
2010 			error = crypto_mac_update((crypto_context_t)mac_ctx,
2011 			    (crypto_data_t *)ct, NULL);
2012 
2013 			if (error != CRYPTO_SUCCESS)
2014 				goto out;
2015 
			/* Decrypt a different region only when told so (dd_len2 > 0) */
2017 
2018 			save_offset = ct->dd_offset1;
2019 			save_len = ct->dd_len1;
2020 
2021 			if (ct->dd_len2 > 0) {
2022 				ct->dd_offset1 = ct->dd_offset2;
2023 				ct->dd_len1 = ct->dd_len2;
2024 			}
2025 
2026 			error = crypto_decrypt_update(context,
2027 			    (crypto_data_t *)ct, pt, NULL);
2028 
2029 			ct->dd_offset1 = save_offset;
2030 			ct->dd_len1 = save_len;
2031 
2032 			goto out;
2033 		}
2034 		/* submit a pure asynchronous request. */
2035 		save_flag = cr->cr_flag;
2036 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2037 
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
		    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL);

2042 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2043 
2044 		cr->cr_flag = save_flag;
2045 		goto out;
2046 	}
2047 
2048 	/* The fast path for SW providers. */
2049 	if (CHECK_FASTPATH(cr, pd)) {
2050 		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
2051 		KCF_PROV_INCRSTATS(pd, error);
2052 	} else {
2053 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
2054 		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);
2055 
2056 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2057 	}
2058 out:
2059 	return (error);
2060 }
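
/*
 * Hypothetical layout of the dual data descriptor consumed by the update
 * routine above (buffer setup omitted; only the dd_* members used in this
 * file appear). Region 1 is what the MAC step sees; region 2, when
 * dd_len2 > 0, is handed to the decryption step instead:
 *
 *	crypto_dual_data_t ct;
 *
 *	ct.dd_offset1 = mac_off;	(region covered by the MAC)
 *	ct.dd_len1 = mac_len;
 *	ct.dd_offset2 = decr_off;	(region decrypted when dd_len2 > 0)
 *	ct.dd_len2 = decr_len;		(0 means decrypt region 1 as well)
 */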
2061 
2062 /*
2063  * Terminates a multi-part dual mac/decrypt operation.
2064  */
2065 /* ARGSUSED */
2066 int
2067 crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
2068     crypto_data_t *pt, crypto_call_req_t *cr)
2069 {
2070 	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
2071 	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
2072 	kcf_provider_desc_t *pd;
2073 	int error;
2074 	kcf_req_params_t params;
2075 
2076 	if ((ctx == NULL) ||
2077 	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
2078 	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
2079 		return (CRYPTO_INVALID_CONTEXT);
2080 	}
2081 
2082 	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
2083 
2084 	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
2085 		crypto_call_flag_t save_flag;
2086 
2087 		if (kcf_mac_ctx->kc_prov_desc == NULL) {
2088 			error = CRYPTO_INVALID_CONTEXT;
2089 			goto out;
2090 		}
2091 		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
2092 
2093 		/* First we collect the MAC */
2094 		if (cr == NULL) {
2095 
2096 			error = crypto_mac_final((crypto_context_t)mac_ctx,
2097 			    mac, NULL);
2098 
2099 			if (error != CRYPTO_SUCCESS) {
2100 				crypto_cancel_ctx(ctx);
2101 			} else {
2102 				/* Get the last chunk of plaintext */
2103 				error = crypto_decrypt_final(context, pt, NULL);
2104 			}
2105 
2106 			return (error);
2107 		}
2108 		/* submit a pure asynchronous request. */
2109 		save_flag = cr->cr_flag;
2110 		cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;
2111 
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
		    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL);

2116 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2117 
2118 		cr->cr_flag = save_flag;
2119 
2120 		return (error);
2121 	}
2122 
2123 	/* The fast path for SW providers. */
2124 	if (CHECK_FASTPATH(cr, pd)) {
2125 		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
2126 		KCF_PROV_INCRSTATS(pd, error);
2127 	} else {
2128 		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
2129 		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);
2130 
2131 		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
2132 	}
2133 out:
2134 	/* Release the hold done in kcf_new_ctx() during init step. */
2135 	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
2136 	return (error);
2137 }
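
/*
 * Hypothetical asynchronous caller of the final step above. This assumes
 * the crypto_call_req_t completion callback fields (cr_callback_func and
 * cr_callback_arg, alongside the cr_flag manipulated in this file); check
 * <sys/crypto/common.h> for the authoritative definition:
 *
 *	static void
 *	my_done(void *arg, int rv)
 *	{
 *		... runs when the queued dual request completes ...
 *	}
 *
 *	crypto_call_req_t req;
 *
 *	req.cr_flag = 0;
 *	req.cr_callback_func = my_done;
 *	req.cr_callback_arg = my_state;
 *
 *	rv = crypto_mac_decrypt_final(dual_ctx, &mac, &last_pt, &req);
 *
 * A CRYPTO_QUEUED return means the request was queued; the final status is
 * then delivered through my_done().
 */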
2138 
2139 /*
2140  * Digest/Encrypt dual operation. Project-private entry point, not part of
2141  * the k-API.
2142  */
2143 /* ARGSUSED */
2144 int
2145 crypto_digest_encrypt_update(crypto_context_t digest_ctx,
2146     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2147     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2148 {
2149 	/*
2150 	 * RFE 4688647:
2151 	 * core functions needed by ioctl interface missing from impl.h
2152 	 */
2153 	return (CRYPTO_NOT_SUPPORTED);
2154 }
2155 
2156 /*
2157  * Decrypt/Digest dual operation. Project-private entry point, not part of
2158  * the k-API.
2159  */
2160 /* ARGSUSED */
2161 int
crypto_decrypt_digest_update(crypto_context_t decrypt_ctx,
    crypto_context_t digest_ctx, crypto_data_t *ciphertext,
2164     crypto_data_t *plaintext, crypto_call_req_t *crq)
2165 {
2166 	/*
2167 	 * RFE 4688647:
2168 	 * core functions needed by ioctl interface missing from impl.h
2169 	 */
2170 	return (CRYPTO_NOT_SUPPORTED);
2171 }
2172 
2173 /*
2174  * Sign/Encrypt dual operation. Project-private entry point, not part of
2175  * the k-API.
2176  */
2177 /* ARGSUSED */
2178 int
2179 crypto_sign_encrypt_update(crypto_context_t sign_ctx,
2180     crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
2181     crypto_data_t *ciphertext, crypto_call_req_t *crq)
2182 {
2183 	/*
2184 	 * RFE 4688647:
2185 	 * core functions needed by ioctl interface missing from impl.h
2186 	 */
2187 	return (CRYPTO_NOT_SUPPORTED);
2188 }
2189 
2190 /*
2191  * Decrypt/Verify dual operation. Project-private entry point, not part of
2192  * the k-API.
2193  */
2194 /* ARGSUSED */
2195 int
2196 crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
2197     crypto_context_t verify_ctx, crypto_data_t *ciphertext,
2198     crypto_data_t *plaintext, crypto_call_req_t *crq)
2199 {
2200 	/*
2201 	 * RFE 4688647:
2202 	 * core functions needed by ioctl interface missing from impl.h
2203 	 */
2204 	return (CRYPTO_NOT_SUPPORTED);
2205 }
2206