/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#include <sys/types.h>
#include <sys/sysmacros.h>
#include <modes/modes.h>
#include "aes_impl.h"
#ifndef	_KERNEL
#include <stdlib.h>
#endif	/* !_KERNEL */


/* Copy a 16-byte AES block from "in" to "out" */
void
aes_copy_block(uint8_t *in, uint8_t *out)
{
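	/*
	 * Fast path: when both pointers are 32-bit aligned, copy the
	 * block as four aligned word loads and stores; otherwise fall
	 * back to the AES_COPY_BLOCK() macro, which makes no alignment
	 * assumptions.
	 */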
	if (IS_P2ALIGNED2(in, out, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[0] = *(uint32_t *)&in[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[4] = *(uint32_t *)&in[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[8] = *(uint32_t *)&in[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&out[12] = *(uint32_t *)&in[12];
	} else {
		AES_COPY_BLOCK(in, out);
	}
}


/* XOR a 16-byte AES block of data into dst */
void
aes_xor_block(uint8_t *data, uint8_t *dst)
{
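	/*
	 * Fast path: when both pointers are 32-bit aligned, XOR the
	 * block as four aligned words; otherwise fall back to the
	 * AES_XOR_BLOCK() macro, which makes no alignment assumptions.
	 */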
	if (IS_P2ALIGNED2(dst, data, sizeof (uint32_t))) {
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[0] ^= *(uint32_t *)&data[0];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[4] ^= *(uint32_t *)&data[4];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[8] ^= *(uint32_t *)&data[8];
		/* LINTED: pointer alignment */
		*(uint32_t *)&dst[12] ^= *(uint32_t *)&data[12];
	} else {
		AES_XOR_BLOCK(data, dst);
	}
}


/*
 * Encrypt multiple blocks of data according to mode.
 */
int
aes_encrypt_contiguous_blocks(void *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	aes_ctx_t *aes_ctx = ctx;
	int rv;

	if (aes_ctx->ac_flags & CTR_MODE) {
		rv = ctr_mode_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
#ifdef _KERNEL
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		rv = ccm_mode_encrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		rv = gcm_mode_encrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
#endif
	} else if (aes_ctx->ac_flags & CBC_MODE) {
		rv = cbc_encrypt_contiguous_blocks(ctx,
		    data, length, out, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_copy_block, aes_xor_block);
	} else {
		rv = ecb_cipher_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block);
	}
	return (rv);
}
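
/*
 * Illustrative call pattern (a sketch only, not a contract defined by
 * this file): a caller that has already initialized an aes_ctx_t (key
 * schedule loaded, IV or counter set up, and the desired mode bit set
 * in ac_flags) hands contiguous plaintext to the dispatcher above and
 * collects ciphertext through the crypto_data_t output descriptor.
 * Here plaintext and plaintext_len are placeholder names for the
 * caller's input buffer:
 *
 *	aes_ctx_t *ctx = ...;		already keyed and mode-initialized
 *	crypto_data_t out;		e.g. a CRYPTO_DATA_RAW output buffer
 *	int rv;
 *
 *	rv = aes_encrypt_contiguous_blocks(ctx, (char *)plaintext,
 *	    plaintext_len, &out);
 *	if (rv != CRYPTO_SUCCESS)
 *		return (rv);
 *
 * Note that the CCM and GCM/GMAC branches are compiled only for the
 * kernel build of this file.
 */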


/*
 * Decrypt multiple blocks of data according to mode.
 */
int
aes_decrypt_contiguous_blocks(void *ctx, char *data, size_t length,
    crypto_data_t *out)
{
	aes_ctx_t *aes_ctx = ctx;
	int rv;

	if (aes_ctx->ac_flags & CTR_MODE) {
		rv = ctr_mode_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
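		/*
		 * CTR decryption runs the forward cipher over the
		 * counter block, hence aes_encrypt_block above.  A
		 * length-range failure here refers to the encrypted
		 * input, so remap the generic error code accordingly.
		 */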
		if (rv == CRYPTO_DATA_LEN_RANGE)
			rv = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
#ifdef _KERNEL
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		rv = ccm_mode_decrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		rv = gcm_mode_decrypt_contiguous_blocks(ctx, data, length,
		    out, AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
#endif
	} else if (aes_ctx->ac_flags & CBC_MODE) {
		rv = cbc_decrypt_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_decrypt_block, aes_copy_block,
		    aes_xor_block);
	} else {
		rv = ecb_cipher_contiguous_blocks(ctx, data, length, out,
		    AES_BLOCK_LEN, aes_decrypt_block);
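		/*
		 * As in the CTR case above, report a length-range
		 * failure in terms of the encrypted input.
		 */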
		if (rv == CRYPTO_DATA_LEN_RANGE)
			rv = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}
	return (rv);
}