author     Dr. Stephen Henson <steve@openssl.org>    2011-08-03 15:37:22 +0000
committer  Dr. Stephen Henson <steve@openssl.org>    2011-08-03 15:37:22 +0000
commit     28dd49faecf567bd946503dd4e3aa65985e659dd (patch)
tree       f667a5fdb9176f418011edbdcabf9b9fe6e14ffc /crypto
parent     3699ec605602cf8ce7400bdbd164ebd9ed658b4c (diff)
Expand the range of ctrls for AES GCM to support retrieval and setting of the
invocation field. Add complete support for AES GCM ciphersuites, including all
those in RFC 5288 and RFC 5289.
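
A rough sketch of the encrypt-side calling sequence this enables (the helper
name, buffer layout and error handling below are illustrative, not code from
this commit):

#include <openssl/evp.h>

/* Hypothetical helper: seal one TLS 1.2 GCM record in place. rec is laid
 * out as explicit IV (8) | plaintext | room for tag (16) and reclen covers
 * all three parts; aad is the 13-byte TLS record pseudo-header. */
static int seal_gcm_record(EVP_CIPHER_CTX *ctx, unsigned char *rec,
                           size_t reclen, const unsigned char aad[13])
{
    /* Hand the AAD to the cipher; the ctrl rewrites its length field to the
     * plaintext length and returns the tag length the record grows by. */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
                            13, (void *)aad) <= 0)
        return 0;
    /* In-place encrypt: the cipher generates the explicit IV, encrypts the
     * payload and appends the tag. */
    return EVP_Cipher(ctx, rec, rec, reclen) > 0;
}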
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/evp/c_allc.c  |   3
-rw-r--r--  crypto/evp/e_aes.c   | 114
-rw-r--r--  crypto/evp/evp.h     |  10
3 files changed, 121 insertions(+), 6 deletions(-)
diff --git a/crypto/evp/c_allc.c b/crypto/evp/c_allc.c
index b262ac0685..395a164c8f 100644
--- a/crypto/evp/c_allc.c
+++ b/crypto/evp/c_allc.c
@@ -167,6 +167,7 @@ void OpenSSL_add_all_ciphers(void)
EVP_add_cipher(EVP_aes_128_cfb8());
EVP_add_cipher(EVP_aes_128_ofb());
EVP_add_cipher(EVP_aes_128_ctr());
+ EVP_add_cipher(EVP_aes_128_gcm());
EVP_add_cipher_alias(SN_aes_128_cbc,"AES128");
EVP_add_cipher_alias(SN_aes_128_cbc,"aes128");
EVP_add_cipher(EVP_aes_192_ecb());
@@ -176,6 +177,7 @@ void OpenSSL_add_all_ciphers(void)
EVP_add_cipher(EVP_aes_192_cfb8());
EVP_add_cipher(EVP_aes_192_ofb());
EVP_add_cipher(EVP_aes_192_ctr());
+ EVP_add_cipher(EVP_aes_192_gcm());
EVP_add_cipher_alias(SN_aes_192_cbc,"AES192");
EVP_add_cipher_alias(SN_aes_192_cbc,"aes192");
EVP_add_cipher(EVP_aes_256_ecb());
@@ -185,6 +187,7 @@ void OpenSSL_add_all_ciphers(void)
EVP_add_cipher(EVP_aes_256_cfb8());
EVP_add_cipher(EVP_aes_256_ofb());
EVP_add_cipher(EVP_aes_256_ctr());
+ EVP_add_cipher(EVP_aes_256_gcm());
EVP_add_cipher_alias(SN_aes_256_cbc,"AES256");
EVP_add_cipher_alias(SN_aes_256_cbc,"aes256");
#endif
diff --git a/crypto/evp/e_aes.c b/crypto/evp/e_aes.c
index 6e47963383..3e2107c2ef 100644
--- a/crypto/evp/e_aes.c
+++ b/crypto/evp/e_aes.c
@@ -76,6 +76,7 @@ typedef struct
int ivlen; /* IV length */
int taglen;
int iv_gen; /* It is OK to generate IVs */
+ int tls_aad_len; /* TLS AAD length */
} EVP_AES_GCM_CTX;
typedef struct
@@ -748,6 +749,7 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
gctx->iv = c->iv;
gctx->taglen = -1;
gctx->iv_gen = 0;
+ gctx->tls_aad_len = -1;
return 1;
case EVP_CTRL_GCM_SET_IVLEN:
@@ -798,7 +800,8 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
return 0;
if (arg)
memcpy(gctx->iv, ptr, arg);
- if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
+ if (c->encrypt &&
+ RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
return 0;
gctx->iv_gen = 1;
return 1;
@@ -807,7 +810,9 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
if (gctx->iv_gen == 0 || gctx->key_set == 0)
return 0;
CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
- memcpy(ptr, gctx->iv, gctx->ivlen);
+ if (arg <= 0 || arg > gctx->ivlen)
+ arg = gctx->ivlen;
+ memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
/* Invocation field will be at least 8 bytes in size and
* so no need to check wrap around or increment more than
* last 8 bytes.
@@ -816,6 +821,33 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
gctx->iv_set = 1;
return 1;
+ case EVP_CTRL_GCM_SET_IV_INV:
+ if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
+ return 0;
+ memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
+ CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
+ gctx->iv_set = 1;
+ return 1;
+
+ case EVP_CTRL_AEAD_TLS1_AAD:
+ /* Save the AAD for later use */
+ if (arg != 13)
+ return 0;
+ memcpy(c->buf, ptr, arg);
+ gctx->tls_aad_len = arg;
+ {
+ unsigned int len=c->buf[arg-2]<<8|c->buf[arg-1];
+ /* Correct length for explicit IV */
+ len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
+ /* If decrypting correct for tag too */
+ if (!c->encrypt)
+ len -= EVP_GCM_TLS_TAG_LEN;
+ c->buf[arg-2] = len>>8;
+ c->buf[arg-1] = len & 0xff;
+ }
+ /* Extra padding: tag appended to record */
+ return EVP_GCM_TLS_TAG_LEN;
+
default:
return -1;
@@ -857,12 +889,79 @@ static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
return 1;
}
+/* Handle TLS GCM packet format. This consists of the last portion of the IV
+ * followed by the payload and finally the tag. On encrypt generate IV,
+ * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
+ * and verify tag.
+ */
+
+static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t len)
+ {
+ EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
+ int rv = -1;
+ /* Encrypt/decrypt must be performed in place */
+ if (out != in)
+ return -1;
+ /* Set IV from start of buffer or generate IV and write to start
+ * of buffer.
+ */
+ if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
+ EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
+ EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
+ goto err;
+ /* Use saved AAD */
+ if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
+ goto err;
+ /* Fix buffer and length to point to payload */
+ in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
+ out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
+ len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
+ if (ctx->encrypt)
+ {
+ /* Encrypt payload */
+ if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
+ goto err;
+ out += len;
+ /* Finally write tag */
+ CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
+ rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
+ }
+ else
+ {
+ /* Decrypt */
+ if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
+ goto err;
+ /* Retrieve tag */
+ CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf,
+ EVP_GCM_TLS_TAG_LEN);
+ /* If tag mismatch wipe buffer */
+ if (memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN))
+ {
+ OPENSSL_cleanse(out, len);
+ goto err;
+ }
+ rv = len;
+ }
+
+ err:
+ gctx->iv_set = 0;
+ gctx->tls_aad_len = -1;
+ return rv;
+ }
+
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t len)
{
EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
/* If not set up, return error */
- if (!gctx->iv_set && !gctx->key_set)
+ if (!gctx->key_set)
+ return -1;
+
+ if (gctx->tls_aad_len >= 0)
+ return aes_gcm_tls_cipher(ctx, out, in, len);
+
+ if (!gctx->iv_set)
return -1;
if (!ctx->encrypt && gctx->taglen < 0)
return -1;
@@ -908,9 +1007,12 @@ static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
| EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
| EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT)
-BLOCK_CIPHER_custom(NID_aes,128,1,12,gcm,GCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
-BLOCK_CIPHER_custom(NID_aes,192,1,12,gcm,GCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
-BLOCK_CIPHER_custom(NID_aes,256,1,12,gcm,GCM,EVP_CIPH_FLAG_FIPS|CUSTOM_FLAGS)
+BLOCK_CIPHER_custom(NID_aes,128,1,12,gcm,GCM,
+ EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
+BLOCK_CIPHER_custom(NID_aes,192,1,12,gcm,GCM,
+ EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
+BLOCK_CIPHER_custom(NID_aes,256,1,12,gcm,GCM,
+ EVP_CIPH_FLAG_FIPS|EVP_CIPH_FLAG_AEAD_CIPHER|CUSTOM_FLAGS)
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
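
A matching decrypt-side sketch of what aes_gcm_tls_cipher() above expects
from a caller (again hypothetical caller code, not part of the patch): the
record arrives as explicit IV (8) | ciphertext | tag (16), and the cipher
returns the plaintext length, or -1 on any failure including a tag mismatch.

#include <openssl/evp.h>

/* Hypothetical helper: open one received TLS 1.2 GCM record in place; on
 * success the plaintext starts at rec + EVP_GCM_TLS_EXPLICIT_IV_LEN. */
static int open_gcm_record(EVP_CIPHER_CTX *ctx, unsigned char *rec,
                           size_t reclen, const unsigned char aad[13])
{
    /* On decrypt the ctrl also corrects the AAD length field for the tag. */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
                            13, (void *)aad) <= 0)
        return -1;
    /* aes_gcm_tls_cipher() installs the explicit IV from the front of the
     * buffer via EVP_CTRL_GCM_SET_IV_INV, decrypts the payload and wipes
     * the output if the trailing tag does not verify. */
    return EVP_Cipher(ctx, rec, rec, reclen);
}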
diff --git a/crypto/evp/evp.h b/crypto/evp/evp.h
index 157de07b04..d6cf616356 100644
--- a/crypto/evp/evp.h
+++ b/crypto/evp/evp.h
@@ -391,6 +391,16 @@ struct evp_cipher_st
#define EVP_CTRL_AEAD_TLS1_AAD 0x16
/* Used by composite AEAD ciphers, no-op in GCM, CCM... */
#define EVP_CTRL_AEAD_SET_MAC_KEY 0x17
+/* Set the GCM invocation field, decrypt only */
+#define EVP_CTRL_GCM_SET_IV_INV 0x18
+
+/* GCM TLS constants */
+/* Length of fixed part of IV derived from PRF */
+#define EVP_GCM_TLS_FIXED_IV_LEN 4
+/* Length of explicit part of IV part of TLS records */
+#define EVP_GCM_TLS_EXPLICIT_IV_LEN 8
+/* Length of tag for TLS */
+#define EVP_GCM_TLS_TAG_LEN 16
typedef struct evp_cipher_info_st
{
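
Finally, a hedged sketch of the key and IV setup these constants imply for the
TLS case. EVP_CTRL_GCM_SET_IV_FIXED is the pre-existing ctrl whose behaviour
the e_aes.c hunk above adjusts for decryption; the variable names and the
surrounding key-block handling are assumptions, not code from this commit.

#include <openssl/evp.h>

/* Hypothetical setup for the read side of one TLS 1.2 GCM direction: the
 * 4-byte fixed IV comes from the key block, while the 8-byte invocation
 * (explicit) field travels with each record. */
static int setup_gcm_read_ctx(EVP_CIPHER_CTX *ctx,
                              const unsigned char *key,
                              const unsigned char *fixed_iv)
{
    EVP_CIPHER_CTX_init(ctx);
    if (!EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, key, NULL))
        return 0;
    /* Install the fixed part of the nonce; the explicit part is set per
     * record, generated with EVP_CTRL_GCM_IV_GEN on the sender and
     * installed with the new EVP_CTRL_GCM_SET_IV_INV on the receiver. */
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IV_FIXED,
                               EVP_GCM_TLS_FIXED_IV_LEN,
                               (void *)fixed_iv) > 0;
}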