Diffstat (limited to 'usr/src/common/crypto/modes/ctr.c')
-rw-r--r--  usr/src/common/crypto/modes/ctr.c  280
1 file changed, 154 insertions(+), 126 deletions(-)
diff --git a/usr/src/common/crypto/modes/ctr.c b/usr/src/common/crypto/modes/ctr.c
index 919ed3ab53..7bf0134bb4 100644
--- a/usr/src/common/crypto/modes/ctr.c
+++ b/usr/src/common/crypto/modes/ctr.c
@@ -21,6 +21,8 @@
/*
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
+ *
+ * Copyright 2019 Joyent, Inc.
*/
#ifndef _KERNEL
@@ -30,6 +32,7 @@
#include <security/cryptoki.h>
#endif
+#include <sys/debug.h>
#include <sys/types.h>
#include <modes/modes.h>
#include <sys/crypto/common.h>
@@ -37,164 +40,184 @@
#include <sys/byteorder.h>
/*
- * Encrypt and decrypt multiple blocks of data in counter mode.
+ * CTR (counter mode) is a stream cipher. That is, it generates a
+ * pseudo-random keystream that is XORed with the input to encrypt
+ * or decrypt it. The keystream is generated by concatenating a
+ * nonce (supplied during initialization) with a counter (initialized
+ * to zero) to form an input block to the cipher mechanism. The
+ * resulting output of the cipher is used as a chunk of the
+ * pseudo-random keystream. Once all of the bytes of the keystream
+ * block have been used, the counter is incremented and the process
+ * repeats.
+ *
+ * Since this is a stream cipher, we do not accumulate blocks of
+ * input text as we do for block modes. Instead we use
+ * ctr_ctx_t->ctr_offset to track the number of bytes consumed from
+ * the current keystream block.
*/
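To make the scheme the comment describes concrete, here is a minimal, self-contained sketch of CTR-style streaming. The names toy_cipher and toy_ctr and the 16-byte block size are hypothetical illustrations, not part of this file; the real code uses the mechanism's cipher via ctr_keysched, and it increments only a masked counter field rather than the whole block:

    #include <stddef.h>
    #include <stdint.h>

    #define BLOCKSZ 16

    /* Hypothetical stand-in for the real block cipher (e.g. AES). */
    static void
    toy_cipher(const uint8_t in[BLOCKSZ], uint8_t out[BLOCKSZ])
    {
        for (size_t i = 0; i < BLOCKSZ; i++)
            out[i] = in[i] ^ 0xa5;  /* not a real cipher! */
    }

    /*
     * Encrypt or decrypt (the operation is symmetric) len bytes of in
     * into out: encrypt the counter block cb to get a keystream block,
     * XOR it with the input, and advance the counter each time a
     * keystream block is exhausted.
     */
    static void
    toy_ctr(uint8_t cb[BLOCKSZ], const uint8_t *in, uint8_t *out, size_t len)
    {
        uint8_t ks[BLOCKSZ];
        size_t off = BLOCKSZ;   /* no keystream generated yet */

        for (size_t i = 0; i < len; i++) {
            if (off == BLOCKSZ) {
                toy_cipher(cb, ks);
                off = 0;
                /* advance the big-endian counter, with carry */
                for (int j = BLOCKSZ - 1; j >= 0; j--) {
                    if (++cb[j] != 0)
                        break;
                }
            }
            out[i] = in[i] ^ ks[off++];
        }
    }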
-int
-ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
- crypto_data_t *out, size_t block_size,
- int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
- void (*xor_block)(uint8_t *, uint8_t *))
+
+static void
+ctr_new_keyblock(ctr_ctx_t *ctx,
+ int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
- size_t remainder = length;
- size_t need;
- uint8_t *datap = (uint8_t *)data;
- uint8_t *blockp;
- uint8_t *lastp;
- void *iov_or_mp;
- offset_t offset;
- uint8_t *out_data_1;
- uint8_t *out_data_2;
- size_t out_data_1_len;
uint64_t lower_counter, upper_counter;
- if (length + ctx->ctr_remainder_len < block_size) {
- /* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
- length);
- ctx->ctr_remainder_len += length;
- ctx->ctr_copy_to = datap;
- return (CRYPTO_SUCCESS);
+ /* increment the counter */
+ lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
+ lower_counter = htonll(lower_counter + 1);
+ lower_counter &= ctx->ctr_lower_mask;
+ ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
+ lower_counter;
+
+ /* wrap around */
+ if (lower_counter == 0) {
+ upper_counter = ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
+ upper_counter = htonll(upper_counter + 1);
+ upper_counter &= ctx->ctr_upper_mask;
+ ctx->ctr_cb[0] = (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
+ upper_counter;
}
- lastp = (uint8_t *)ctx->ctr_cb;
- if (out != NULL)
- crypto_init_ptrs(out, &iov_or_mp, &offset);
-
- do {
- /* Unprocessed data from last call. */
- if (ctx->ctr_remainder_len > 0) {
- need = block_size - ctx->ctr_remainder_len;
-
- if (need > remainder)
- return (CRYPTO_DATA_LEN_RANGE);
-
- bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
- [ctx->ctr_remainder_len], need);
-
- blockp = (uint8_t *)ctx->ctr_remainder;
- } else {
- blockp = datap;
- }
+ /* generate the new keyblock */
+ cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
+ (uint8_t *)ctx->ctr_keystream);
+ ctx->ctr_offset = 0;
+}
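For reference, the masked increment above can be exercised standalone. A minimal sketch of the same dance on one 64-bit word, with be64toh()/htobe64() standing in for the kernel's ntohll()/htonll() and the name masked_incr being hypothetical:

    #include <stdint.h>
    #include <endian.h>     /* be64toh/htobe64 */

    /*
     * Increment only the counter bits selected by mask_be in the
     * big-endian word word_be, wrapping within the mask and leaving
     * the nonce bits untouched -- as ctr_new_keyblock() does on
     * ctr_cb[1], and on ctr_cb[0] when the low word wraps to zero.
     */
    static uint64_t
    masked_incr(uint64_t word_be, uint64_t mask_be)
    {
        uint64_t ctr = be64toh(word_be & mask_be);

        ctr = htobe64(ctr + 1) & mask_be;
        return ((word_be & ~mask_be) | ctr);
    }

Because byte swapping commutes with AND, masking again after the htobe64() is the same as wrapping the counter modulo the mask width, so a 32-bit counter rolls over without disturbing the nonce bytes.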
- /* ctr_cb is the counter block */
- cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
- (uint8_t *)ctx->ctr_tmp);
+#ifdef __x86
+/*
+ * It is not worth checking for pointer alignment on x86 -- always
+ * try to process 32 bits at a time when enough data is available.
+ */
+#define TRY32(_src, _dst, _key, _keylen, _outlen) \
+ ((_keylen) > 3 && (_outlen) > 3)
+#else
+/*
+ * Other platforms (e.g. SPARC) require the pointers to be aligned
+ * before we can process 32 bits at a time.
+ */
+#define TRY32(_src, _dst, _key, _keylen, _outlen) \
+ ((_keylen) > 3 && (_outlen) > 3 && \
+ IS_P2ALIGNED((_src), sizeof (uint32_t)) && \
+ IS_P2ALIGNED((_dst), sizeof (uint32_t)) && \
+ IS_P2ALIGNED((_key), sizeof (uint32_t)))
+#endif
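For readers outside the illumos tree: IS_P2ALIGNED() tests that an address is a multiple of a power-of-two boundary. A sketch of an equivalent check (the real macro lives in sys/sysmacros.h):

    #include <stdint.h>

    /* True if pointer p is aligned to the power-of-two boundary a. */
    #define MY_P2ALIGNED(p, a) \
        ((((uintptr_t)(p)) & ((uintptr_t)(a) - 1)) == 0)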
- lastp = (uint8_t *)ctx->ctr_tmp;
+/*
+ * XOR the input with the keystream and write the result to out.
+ * This requires that 'in' contain at least outlen bytes of data
+ * (ctr_mode_contiguous_blocks() guarantees this before we are
+ * called). Because CTR mode is a stream cipher, we cannot use a
+ * cipher's xxx_xor_block function (e.g. aes_xor_block()): we must
+ * handle arbitrary lengths of input and must not buffer or
+ * accumulate partial blocks between calls.
+ */
+static void
+ctr_xor(ctr_ctx_t *ctx, uint8_t *in, uint8_t *out, size_t outlen,
+ size_t block_size,
+ int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
+{
+ uint8_t *keyp;
+ size_t keyamt;
+ while (outlen > 0) {
/*
- * Increment Counter.
+ * This occurs once we've consumed all the bytes in the
+ * current block of the keystream. ctr_init_ctx() creates
+ * the initial block of the keystream, so we always start
+ * with a full block of key data.
*/
- lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
- lower_counter = htonll(lower_counter + 1);
- lower_counter &= ctx->ctr_lower_mask;
- ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
- lower_counter;
-
- /* wrap around */
- if (lower_counter == 0) {
- upper_counter =
- ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
- upper_counter = htonll(upper_counter + 1);
- upper_counter &= ctx->ctr_upper_mask;
- ctx->ctr_cb[0] =
- (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
- upper_counter;
+ if (ctx->ctr_offset == block_size) {
+ ctr_new_keyblock(ctx, cipher);
}
+ keyp = (uint8_t *)ctx->ctr_keystream + ctx->ctr_offset;
+ keyamt = block_size - ctx->ctr_offset;
+
/*
- * XOR encrypted counter block with the current clear block.
+ * Try to process 32 bits at a time when possible.
*/
- xor_block(blockp, lastp);
-
- if (out == NULL) {
- if (ctx->ctr_remainder_len > 0) {
- bcopy(lastp, ctx->ctr_copy_to,
- ctx->ctr_remainder_len);
- bcopy(lastp + ctx->ctr_remainder_len, datap,
- need);
- }
- } else {
- crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
- &out_data_1_len, &out_data_2, block_size);
-
- /* copy block to where it belongs */
- bcopy(lastp, out_data_1, out_data_1_len);
- if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len, out_data_2,
- block_size - out_data_1_len);
- }
- /* update offset */
- out->cd_offset += block_size;
+ if (TRY32(in, out, keyp, keyamt, outlen)) {
+ uint32_t *in32 = (uint32_t *)in;
+ uint32_t *out32 = (uint32_t *)out;
+ uint32_t *key32 = (uint32_t *)keyp;
+
+ do {
+ *out32++ = *in32++ ^ *key32++;
+ keyamt -= sizeof (uint32_t);
+ outlen -= sizeof (uint32_t);
+ } while (keyamt > 3 && outlen > 3);
+
+ in = (uint8_t *)in32;
+ out = (uint8_t *)out32;
+ keyp = (uint8_t *)key32;
}
- /* Update pointer to next block of data to be processed. */
- if (ctx->ctr_remainder_len != 0) {
- datap += need;
- ctx->ctr_remainder_len = 0;
- } else {
- datap += block_size;
+ while (keyamt > 0 && outlen > 0) {
+ *out++ = *in++ ^ *keyp++;
+ keyamt--;
+ outlen--;
}
- remainder = (size_t)&data[length] - (size_t)datap;
-
- /* Incomplete last block. */
- if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->ctr_remainder, remainder);
- ctx->ctr_remainder_len = remainder;
- ctx->ctr_copy_to = datap;
- goto out;
- }
- ctx->ctr_copy_to = NULL;
-
- } while (remainder > 0);
-
-out:
- return (CRYPTO_SUCCESS);
+ ctx->ctr_offset = block_size - keyamt;
+ }
}
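The fast-path/tail split in ctr_xor() can be illustrated with a standalone routine. A sketch, not the kernel code (xor_stream is a hypothetical name): XOR a word at a time when all three pointers are 32-bit aligned, then finish byte by byte:

    #include <stddef.h>
    #include <stdint.h>

    /* XOR len bytes of src with key into dst. */
    static void
    xor_stream(const uint8_t *src, const uint8_t *key, uint8_t *dst,
        size_t len)
    {
        /* Fast path: all three pointers 32-bit aligned. */
        if ((((uintptr_t)src | (uintptr_t)key | (uintptr_t)dst) & 3) == 0) {
            while (len >= sizeof (uint32_t)) {
                *(uint32_t *)dst = *(const uint32_t *)src ^
                    *(const uint32_t *)key;
                src += 4;
                key += 4;
                dst += 4;
                len -= 4;
            }
        }
        /* Byte tail, and the unaligned case. */
        while (len-- > 0)
            *dst++ = *src++ ^ *key++;
    }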
+/*
+ * Encrypt and decrypt multiple blocks of data in counter mode.
+ */
int
-ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
- int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
+ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
+ crypto_data_t *out, size_t block_size,
+ int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct))
{
- uint8_t *lastp;
- uint8_t *p;
- int i;
- int rv;
+ size_t remainder = length;
+ uint8_t *datap = (uint8_t *)data;
+ void *iov_or_mp;
+ offset_t offset;
+ uint8_t *out_data_1;
+ uint8_t *out_data_2;
+ size_t out_data_1_len;
- if (out->cd_length < ctx->ctr_remainder_len)
- return (CRYPTO_DATA_LEN_RANGE);
+ if (block_size > sizeof (ctx->ctr_keystream))
+ return (CRYPTO_ARGUMENTS_BAD);
- encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
- (uint8_t *)ctx->ctr_tmp);
+ if (out == NULL)
+ return (CRYPTO_ARGUMENTS_BAD);
- lastp = (uint8_t *)ctx->ctr_tmp;
- p = (uint8_t *)ctx->ctr_remainder;
- for (i = 0; i < ctx->ctr_remainder_len; i++) {
- p[i] ^= lastp[i];
- }
+ /*
+ * This check guarantees 'out' contains sufficient space for
+ * the resulting output.
+ */
+ if (out->cd_offset + length > out->cd_length)
+ return (CRYPTO_BUFFER_TOO_SMALL);
- rv = crypto_put_output_data(p, out, ctx->ctr_remainder_len);
- if (rv == CRYPTO_SUCCESS) {
- out->cd_offset += ctx->ctr_remainder_len;
- ctx->ctr_remainder_len = 0;
+ crypto_init_ptrs(out, &iov_or_mp, &offset);
+
+ /* Now XOR the input with the keystream, writing into 'out' */
+ while (remainder > 0) {
+ crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
+ &out_data_1_len, &out_data_2, remainder);
+
+ /*
+ * crypto_get_ptrs() should guarantee this, but we assert it as
+ * a safeguard in case the behavior ever changes.
+ */
+ ASSERT3U(out_data_1_len, <=, remainder);
+ ctr_xor(ctx, datap, out_data_1, out_data_1_len, block_size,
+ cipher);
+
+ datap += out_data_1_len;
+ remainder -= out_data_1_len;
}
- return (rv);
+
+ out->cd_offset += length;
+
+ return (CRYPTO_SUCCESS);
}
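The loop above walks the possibly non-contiguous output one region at a time via crypto_get_ptrs(). The shape of that walk, sketched with plain iovecs rather than crypto_data_t (illustrative only; walk_out is a hypothetical name, and memcpy() stands in for the ctr_xor() call):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>
    #include <sys/uio.h>

    /* Consume len bytes of in across an array of output iovecs. */
    static void
    walk_out(const uint8_t *in, size_t len, const struct iovec *iov,
        int iovcnt)
    {
        size_t remainder = len;

        for (int i = 0; i < iovcnt && remainder > 0; i++) {
            size_t n = (iov[i].iov_len < remainder) ?
                iov[i].iov_len : remainder;

            /* the real loop XORs via ctr_xor() instead */
            memcpy(iov[i].iov_base, in, n);
            in += n;
            remainder -= n;
        }
    }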
int
ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
+ int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
void (*copy_block)(uint8_t *, uint8_t *))
{
uint64_t upper_mask = 0;
@@ -217,6 +240,11 @@ ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
+
+ /* Generate the first block of the keystream */
+ cipher(ctr_ctx->ctr_keysched, (uint8_t *)ctr_ctx->ctr_cb,
+ (uint8_t *)ctr_ctx->ctr_keystream);
+
ctr_ctx->ctr_flags |= CTR_MODE;
return (CRYPTO_SUCCESS);
}
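For context, the ctr_lower_mask/ctr_upper_mask values consumed by ctr_new_keyblock() are built in the unchanged portion of ctr_init_ctx() from 'count', the number of low-order counter bits in the counter block. A sketch equivalent to that construction, assuming count has already been validated to lie in [1, 128]:

    uint64_t upper_mask = 0, lower_mask = 0;

    if (count == 128) {
        upper_mask = lower_mask = UINT64_MAX;
    } else if (count >= 64) {
        lower_mask = UINT64_MAX;
        if (count > 64)
            upper_mask = (1ULL << (count - 64)) - 1;
    } else {
        lower_mask = (1ULL << count) - 1;
    }

    /* stored big-endian so they can mask the ctr_cb[] words directly */
    ctr_ctx->ctr_lower_mask = htonll(lower_mask);
    ctr_ctx->ctr_upper_mask = htonll(upper_mask);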