/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 *
 * Copyright 2019 Joyent, Inc.
 */

/*
 * NOTE(review): the original include targets were stripped during
 * extraction; the headers below are reconstructed from the symbols used
 * in this file (ASSERT3U -> sys/debug.h, ntohll/htonll -> sys/byteorder.h,
 * CRYPTO_* / crypto_data_t -> sys/crypto headers, assert/calloc for the
 * non-kernel build) -- confirm against the repository copy.
 */
#ifndef _KERNEL
#include <strings.h>
#include <limits.h>
#include <assert.h>
#include <security/cryptoki.h>
#endif

#include <sys/debug.h>
#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/byteorder.h>

/*
 * CTR (counter mode) is a stream cipher. That is, it generates a
 * pseudo-random keystream that is used to XOR with the input to
 * encrypt or decrypt. The pseudo-random keystream is generated by
 * concatenating a nonce (supplied during initialization) with a
 * counter (initialized to zero) to form an input block to the cipher
 * mechanism. The resulting output of the cipher is used as a chunk
 * of the pseudo-random keystream. Once all of the bytes of the
 * keystream block have been used, the counter is incremented and
 * the process repeats.
 *
 * Since this is a stream cipher, we do not accumulate input cipher
 * text like we do for block modes. Instead we use ctr_ctx_t->ctr_offset
 * to track the amount of bytes used in the current keystream block.
 */

/*
 * Advance the counter block and generate the next block of keystream.
 *
 * The counter block (ctr_cb) is stored in big-endian byte order;
 * ctr_lower_mask/ctr_upper_mask (also stored big-endian, set up by
 * ctr_init_ctx()) select which bits of the two 64-bit words form the
 * counter.  The counter is incremented modulo its masked width while the
 * nonce bits outside the masks are preserved, then the updated counter
 * block is run through 'cipher' to refill ctr_keystream, and ctr_offset
 * is reset so callers start consuming the new block from the beginning.
 */
static void
ctr_new_keyblock(ctr_ctx_t *ctx,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	uint64_t lower_counter, upper_counter;

	/* increment the counter */
	/* convert the masked counter bits to host order for the add */
	lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
	lower_counter = htonll(lower_counter + 1);
	/* truncate to the counter width; this is where wrap-around occurs */
	lower_counter &= ctx->ctr_lower_mask;
	/* merge the new counter bits back without disturbing the nonce */
	ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
	    lower_counter;

	/* wrap around */
	if (lower_counter == 0) {
		/* carry into the upper 64-bit word, same masking scheme */
		upper_counter = ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
		upper_counter = htonll(upper_counter + 1);
		upper_counter &= ctx->ctr_upper_mask;
		ctx->ctr_cb[0] = (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
		    upper_counter;
	}

	/* generate the new keyblock */
	cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
	    (uint8_t *)ctx->ctr_keystream);
	ctx->ctr_offset = 0;
}

#ifdef __x86
/*
 * It's not worth bothering to check for pointer alignment on X86 -- always
 * try to do 32-bits at a time when enough data is available.
 */
#define	TRY32(_src, _dst, _key, _keylen, _outlen) \
	((_keylen) > 3 && (_outlen) > 3)
#else
/*
 * Other platforms (e.g. SPARC) require the pointers to be aligned to
 * do 32-bits at a time.
 */
#define	TRY32(_src, _dst, _key, _keylen, _outlen) \
	((_keylen) > 3 && (_outlen) > 3 && \
	IS_P2ALIGNED((_src), sizeof (uint32_t)) && \
	IS_P2ALIGNED((_dst), sizeof (uint32_t)) && \
	IS_P2ALIGNED((_key), sizeof (uint32_t)))
#endif

/*
 * XOR the input with the keystream and write the result to out.
 * This requires that the amount of data in 'in' is >= outlen
 * (ctr_mode_contiguous_blocks() guarantees this for us before we are
 * called). As CTR mode is a stream cipher, we cannot use a cipher's
 * xxx_xor_block function (e.g. aes_xor_block()) as we must handle
 * arbitrary lengths of input and should not buffer/accumulate partial
 * blocks between calls.
 */
static void
ctr_xor(ctr_ctx_t *ctx, uint8_t *in, uint8_t *out, size_t outlen,
    size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	uint8_t *keyp;
	size_t keyamt;

	while (outlen > 0) {
		/*
		 * This occurs once we've consumed all the bytes in the
		 * current block of the keystream. ctr_init_ctx() creates
		 * the initial block of the keystream, so we always start
		 * with a full block of key data.
		 */
		if (ctx->ctr_offset == block_size) {
			ctr_new_keyblock(ctx, cipher);
		}

		/* unconsumed portion of the current keystream block */
		keyp = (uint8_t *)ctx->ctr_keystream + ctx->ctr_offset;
		keyamt = block_size - ctx->ctr_offset;

		/*
		 * Try to process 32-bits at a time when possible.
		 */
		if (TRY32(in, out, keyp, keyamt, outlen)) {
			uint32_t *in32 = (uint32_t *)in;
			uint32_t *out32 = (uint32_t *)out;
			uint32_t *key32 = (uint32_t *)keyp;

			/*
			 * do/while is safe here: TRY32 established that
			 * both keyamt and outlen are at least 4.
			 */
			do {
				*out32++ = *in32++ ^ *key32++;
				keyamt -= sizeof (uint32_t);
				outlen -= sizeof (uint32_t);
			} while (keyamt > 3 && outlen > 3);

			in = (uint8_t *)in32;
			out = (uint8_t *)out32;
			keyp = (uint8_t *)key32;
		}

		/* handle the remaining (< 4) bytes one at a time */
		while (keyamt > 0 && outlen > 0) {
			*out++ = *in++ ^ *keyp++;
			keyamt--;
			outlen--;
		}

		/* remember how much of this keystream block was consumed */
		ctx->ctr_offset = block_size - keyamt;
	}
}

/*
 * Encrypt and decrypt multiple blocks of data in counter mode.
 *
 * 'length' bytes at 'data' are XORed with the keystream in 'ctx' and the
 * result is written to 'out' (a crypto_data_t whose storage may be split
 * across several buffers; crypto_get_ptrs() walks those segments).  Since
 * CTR is a stream cipher, the same routine performs both encryption and
 * decryption, and 'length' need not be a multiple of 'block_size'.
 *
 * Returns CRYPTO_SUCCESS, or CRYPTO_ARGUMENTS_BAD /
 * CRYPTO_BUFFER_TOO_SMALL on invalid parameters.
 */
int
ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
    crypto_data_t *out, size_t block_size,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
	size_t remainder = length;
	uint8_t *datap = (uint8_t *)data;
	void *iov_or_mp;
	offset_t offset;
	uint8_t *out_data_1;
	uint8_t *out_data_2;
	size_t out_data_1_len;

	/* the keystream buffer must be able to hold a full cipher block */
	if (block_size > sizeof (ctx->ctr_keystream))
		return (CRYPTO_ARGUMENTS_BAD);

	if (out == NULL)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * This check guarantees 'out' contains sufficient space for
	 * the resulting output.
	 */
	if (out->cd_offset + length > out->cd_length)
		return (CRYPTO_BUFFER_TOO_SMALL);

	crypto_init_ptrs(out, &iov_or_mp, &offset);

	/* Now XOR the output with the keystream */
	while (remainder > 0) {
		/* get the next contiguous output segment */
		crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
		    &out_data_1_len, &out_data_2, remainder);

		/*
		 * crypto_get_ptrs() should guarantee this, but act as a
		 * safeguard in case the behavior ever changes.
		 */
		ASSERT3U(out_data_1_len, <=, remainder);

		ctr_xor(ctx, datap, out_data_1, out_data_1_len, block_size,
		    cipher);

		datap += out_data_1_len;
		remainder -= out_data_1_len;
	}
	out->cd_offset += length;
	return (CRYPTO_SUCCESS);
}

/*
 * Initialize a CTR mode context.
 *
 * 'count' is the size (in bits, 1 - 128) of the counter portion of the
 * counter block 'cb'; the remaining high-order bits are treated as the
 * nonce and are never modified.  'copy_block' copies 'cb' into the
 * context and 'cipher' is used to generate the first keystream block.
 *
 * Returns CRYPTO_SUCCESS or CRYPTO_MECHANISM_PARAM_INVALID if 'count'
 * is out of range.
 */
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
    int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
    void (*copy_block)(uint8_t *, uint8_t *))
{
	uint64_t upper_mask = 0;
	uint64_t lower_mask = 0;

	if (count == 0 || count > 128) {
		return (CRYPTO_MECHANISM_PARAM_INVALID);
	}

	/* upper 64 bits of the mask */
	if (count >= 64) {
		count -= 64;
		/*
		 * The ternary avoids the undefined behavior of shifting a
		 * 64-bit value by 64 when count == 128.
		 */
		upper_mask = (count == 64) ? UINT64_MAX :
		    (1ULL << count) - 1;
		lower_mask = UINT64_MAX;
	} else {
		/* now the lower 63 bits */
		lower_mask = (1ULL << count) - 1;
	}
	/* masks are stored big-endian, matching the counter block */
	ctr_ctx->ctr_lower_mask = htonll(lower_mask);
	ctr_ctx->ctr_upper_mask = htonll(upper_mask);

	copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
	ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];

	/* Generate the first block of the keystream */
	cipher(ctr_ctx->ctr_keysched, (uint8_t *)ctr_ctx->ctr_cb,
	    (uint8_t *)ctr_ctx->ctr_keystream);

	ctr_ctx->ctr_flags |= CTR_MODE;
	return (CRYPTO_SUCCESS);
}

/*
 * Allocate and zero a CTR mode context, with CTR_MODE already set in
 * ctr_flags.  Returns NULL on allocation failure.  'kmflag' is the
 * kmem_zalloc() sleep flag and is unused in the non-kernel build
 * (hence ARGSUSED).
 */
/* ARGSUSED */
void *
ctr_alloc_ctx(int kmflag)
{
	ctr_ctx_t *ctr_ctx;

#ifdef _KERNEL
	if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
#else
	if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
#endif
		return (NULL);

	ctr_ctx->ctr_flags = CTR_MODE;
	return (ctr_ctx);
}