/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 */
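/*
 * The data need not be a multiple of block_size; a partial trailing
 * block is buffered in ctx->ctr_remainder and is completed on the
 * next call, or encrypted by ctr_mode_final().  The cipher callback
 * encrypts one counter block with ctx->ctr_keysched, and xor_block
 * XORs a clear block into that keystream block.
 */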
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*xor_block)(uint8_t *, uint8_t *))
{
	size_t remainder = length;
	size_t need;
	uint8_t *datap = (uint8_t *)data;
	uint8_t *blockp;
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint64_t counter;
#ifdef _LITTLE_ENDIAN
	uint8_t *p;
#endif

	if (length + ctx->ctr_remainder_len < block_size) {
		/* accumulate bytes here and return */
		bcopy(datap,
		    (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
		    length);
		ctx->ctr_remainder_len += length;
		ctx->ctr_copy_to = datap;
		return (CRYPTO_SUCCESS);
	}

	lastp = (uint8_t *)ctx->ctr_cb;
	if (out != NULL)
		crypto_init_ptrs(out, &iov_or_mp, &offset);

	do {
		/* Unprocessed data from last call. */
		if (ctx->ctr_remainder_len > 0) {
			need = block_size - ctx->ctr_remainder_len;

			if (need > remainder)
				return (CRYPTO_DATA_LEN_RANGE);

			bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
			    [ctx->ctr_remainder_len], need);

			blockp = (uint8_t *)ctx->ctr_remainder;
		} else {
			blockp = datap;
		}

		/* ctr_cb is the counter block */
		cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
		    (uint8_t *)ctx->ctr_tmp);

		lastp = (uint8_t *)ctx->ctr_tmp;

		/*
		 * Increment counter. Counter bits are confined
		 * to the bottom 64 bits of the counter block.
		 */
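		/*
		 * ctr_cb[1] holds the low 64 bits of the counter block in
		 * big-endian byte order.  On little-endian machines the
		 * value is byte-swapped into native order before the
		 * increment and swapped back afterwards.
		 */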
		counter = ctx->ctr_cb[1] & ctx->ctr_counter_mask;
#ifdef _LITTLE_ENDIAN
		p = (uint8_t *)&counter;
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter++;
#ifdef _LITTLE_ENDIAN
		counter = (((uint64_t)p[0] << 56) |
		    ((uint64_t)p[1] << 48) |
		    ((uint64_t)p[2] << 40) |
		    ((uint64_t)p[3] << 32) |
		    ((uint64_t)p[4] << 24) |
		    ((uint64_t)p[5] << 16) |
		    ((uint64_t)p[6] << 8) |
		    (uint64_t)p[7]);
#endif
		counter &= ctx->ctr_counter_mask;
		ctx->ctr_cb[1] =
		    (ctx->ctr_cb[1] & ~(ctx->ctr_counter_mask)) | counter;

		/*
		 * XOR the current clear block into the encrypted counter
		 * block (the keystream); lastp now holds the result.
		 */
		xor_block(blockp, lastp);

		if (out == NULL) {
			if (ctx->ctr_remainder_len > 0) {
				bcopy(lastp, ctx->ctr_copy_to,
				    ctx->ctr_remainder_len);
				bcopy(lastp + ctx->ctr_remainder_len, datap,
				    need);
			}
		} else {
			crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
			    &out_data_1_len, &out_data_2, block_size);

			/* copy block to where it belongs */
			bcopy(lastp, out_data_1, out_data_1_len);
			if (out_data_2 != NULL) {
				bcopy(lastp + out_data_1_len, out_data_2,
				    block_size - out_data_1_len);
			}
			/* update offset */
			out->cd_offset += block_size;
		}

		/* Update pointer to next block of data to be processed. */
		if (ctx->ctr_remainder_len != 0) {
			datap += need;
			ctx->ctr_remainder_len = 0;
		} else {
			datap += block_size;
		}

		remainder = (size_t)&data[length] - (size_t)datap;

		/* Incomplete last block. */
		if (remainder > 0 && remainder < block_size) {
			bcopy(datap, ctx->ctr_remainder, remainder);
			ctx->ctr_remainder_len = remainder;
			ctx->ctr_copy_to = datap;
			goto out;
		}
		ctx->ctr_copy_to = NULL;

	} while (remainder > 0);

out:
	return (CRYPTO_SUCCESS);
}

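/*
 * Encrypt (or decrypt) the final partial block that was buffered in
 * ctx->ctr_remainder: generate one more keystream block from the
 * current counter block, XOR it with the buffered bytes, and copy the
 * result to out.
 */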
int
ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
{
	uint8_t *lastp;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;
	uint8_t *p;
	int i;

	if (out->cd_length < ctx->ctr_remainder_len)
		return (CRYPTO_DATA_LEN_RANGE);

	encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_tmp);

	lastp = (uint8_t *)ctx->ctr_tmp;
	p = (uint8_t *)ctx->ctr_remainder;
	for (i = 0; i < ctx->ctr_remainder_len; i++) {
		p[i] ^= lastp[i];
	}

	crypto_init_ptrs(out, &iov_or_mp, &offset);
	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
	    &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);

	bcopy(p, out_data_1, out_data_1_len);
	if (out_data_2 != NULL) {
		bcopy((uint8_t *)p + out_data_1_len,
		    out_data_2, ctx->ctr_remainder_len - out_data_1_len);
	}
	out->cd_offset += ctx->ctr_remainder_len;
	ctx->ctr_remainder_len = 0;
	return (CRYPTO_SUCCESS);
}

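/*
 * Initialize the CTR context: 'count' is the number of low-order bits
 * of the counter block that make up the counter (1 to 64).  The
 * corresponding mask is stored in the byte order of the counter block,
 * and 'cb' is copied in as the initial counter block.
 */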
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t mask = 0;
#ifdef _LITTLE_ENDIAN
	uint8_t *p8;
#endif

	if (count == 0 || count > 64) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}
	while (count-- > 0)
		mask |= (1ULL << count);
#ifdef _LITTLE_ENDIAN
	p8 = (uint8_t *)&mask;
	mask = (((uint64_t)p8[0] << 56) |
	    ((uint64_t)p8[1] << 48) |
	    ((uint64_t)p8[2] << 40) |
	    ((uint64_t)p8[3] << 32) |
	    ((uint64_t)p8[4] << 24) |
	    ((uint64_t)p8[5] << 16) |
	    ((uint64_t)p8[6] << 8) |
	    (uint64_t)p8[7]);
#endif
	ctr_ctx->ctr_counter_mask = mask;
	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}