/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M% %I% %E% SMI"

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			need = block_size - ctx->ctr_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment the counter.  The counter bits are confined
		 * to the bottom 64 bits of the counter block; on
		 * little-endian machines the stored counter is
		 * byte-swapped to a native integer before the increment
		 * and swapped back afterwards.
		 */
		counter = ctx->ctr_cb[1] & ctx->ctr_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		/* swap the incremented value back to storage byte order */
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ctr_counter_mask;
		ctx->ctr_cb[1] =
		    (ctx->ctr_cb[1] & ~(ctx->ctr_counter_mask)) | counter;

		/*
		 * XOR the encrypted counter block (the keystream in
		 * ctr_tmp) with the current clear or cipher text block.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
		} else {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}

int
ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
{
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint8_t *p;
	int i;

	if (out->cd_length < ctx->ctr_remainder_len)
		return (CRYPTO_DATA_LEN_RANGE);

	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_tmp);

	lastp = (uint8_t *)ctx->ctr_tmp;
	p = (uint8_t *)ctx->ctr_remainder;
	for (i = 0; i < ctx->ctr_remainder_len; i++) {
		p[i] ^= lastp[i];
	}

	crypto_init_ptrs(out, &iov_or_mp, &offset);
	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);

	bcopy(p, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)p + out_data_1_len,
		    out_data_2, ctx->ctr_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ctr_remainder_len;
	ctx->ctr_remainder_len = 0;
	return (CRYPTO_SUCCESS);
}
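
/*
 * Illustrative sketch (not compiled, not part of the original interface):
 * one possible way a caller could drive the two streaming entry points
 * above to CTR-encrypt a contiguous buffer.  The ctr_ctx_t is assumed to
 * have been set up already (see ctr_init_ctx below) with ctr_keysched
 * pointing at an expanded key schedule.  The AES callback names
 * (aes_encrypt_block, aes_xor_block), AES_BLOCK_LEN and the crypto_data_t
 * field usage are assumptions for illustration only; the real callers live
 * in the mechanism provider code, not in this file.
 */
#if 0
static int
ctr_encrypt_example(ctr_ctx_t *ctx, char *plaintext, size_t len,
    uint8_t *outbuf, size_t outlen)
{
	crypto_data_t out;
	int rv;

	/* Describe the output as a single contiguous (raw) buffer. */
	bzero(&out, sizeof (out));
	out.cd_format = CRYPTO_DATA_RAW;
	out.cd_offset = 0;
	out.cd_length = outlen;
	out.cd_raw.iov_base = (caddr_t)outbuf;
	out.cd_raw.iov_len = outlen;

	/* Process all complete blocks; a partial tail is buffered in ctx. */
	rv = ctr_mode_contiguous_blocks(ctx, plaintext, len, &out,
	    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
	if (rv != CRYPTO_SUCCESS)
		return (rv);

	/* Flush the buffered partial block, if any. */
	return (ctr_mode_final(ctx, &out, aes_encrypt_block));
}
#endif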

int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t mask = 0;
#ifdef _LITTLE_ENDIAN
	uint8_t *p8;
#endif

	if (count == 0 || count > 64) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	while (count-- > 0)
		mask |= (1ULL << count);
#ifdef _LITTLE_ENDIAN
	/* byte-swap the mask so it lines up with the stored counter block */
	p8 = (uint8_t *)&mask;
	mask = (((uint64_t)p8[0] << 56) |
	    ((uint64_t)p8[1] << 48) |
	    ((uint64_t)p8[2] << 40) |
	    ((uint64_t)p8[3] << 32) |
	    ((uint64_t)p8[4] << 24) |
	    ((uint64_t)p8[5] << 16) |
	    ((uint64_t)p8[6] << 8) |
	    (uint64_t)p8[7]);
#endif
	ctr_ctx->ctr_counter_mask = mask;
	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}
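
/*
 * Illustrative sketch (not compiled, not part of the original interface):
 * how a CTR context might be allocated and initialized before calling the
 * block-processing routines above.  The split of the 16-byte initial
 * counter block into nonce and counter, the choice of 32 counter bits and
 * the aes_copy_block callback are assumptions for illustration; the
 * authoritative setup is done by the mechanism providers that use this file.
 */
#if 0
static ctr_ctx_t *
ctr_setup_example(void *aes_keysched, uint8_t cb[16])
{
	ctr_ctx_t *ctx;

#ifdef _KERNEL
	ctx = ctr_alloc_ctx(KM_SLEEP);
#else
	ctx = ctr_alloc_ctx(0);		/* kmflag is unused outside the kernel */
#endif
	if (ctx == NULL)
		return (NULL);

	/*
	 * cb is the full initial counter block; the low 32 bits of its
	 * second 64-bit word are treated as the wrapping counter
	 * (ctr_init_ctx accepts any width from 1 to 64 bits).
	 */
	if (ctr_init_ctx(ctx, 32, cb, aes_copy_block) != CRYPTO_SUCCESS) {
#ifdef _KERNEL
		kmem_free(ctx, sizeof (ctr_ctx_t));
#else
		free(ctx);
#endif
		return (NULL);
	}

	/* The expanded key schedule is supplied by the caller. */
	ctx->ctr_keysched = aes_keysched;

	return (ctx);
}
#endif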