author     Linus Torvalds <torvalds@linux-foundation.org>   2018-08-15 16:01:47 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>   2018-08-15 16:01:47 -0700
commit     dafa5f6577a9eecd2941add553d1672c30b02364 (patch)
tree       ff9d3d2dffafd6eba1b6ac21ba50623812041b70 /crypto
parent     9a76aba02a37718242d7cdc294f0a3901928aa57 (diff)
parent     22240df7ac6d76a271197571a7be45addef2ba15 (diff)
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "API:
   - Fix dcache flushing crash in skcipher.
   - Add hash finup self-tests.
   - Reschedule during speed tests.

  Algorithms:
   - Remove insecure vmac and replace it with vmac64.
   - Add public key verification for DH/ECDH.

  Drivers:
   - Decrease priority of sha-mb on x86.
   - Improve NEON latency/throughput on ARM64.
   - Add md5/sha384/sha512/des/3des to inside-secure.
   - Support eip197d in inside-secure.
   - Only register algorithms supported by the host in virtio.
   - Add cts and remove incompatible cts1 from ccree.
   - Add hisilicon SEC security accelerator driver.
   - Replace msm hwrng driver with qcom pseudo rng driver.

  Misc:
   - Centralize CRC polynomials"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (121 commits)
  crypto: arm64/ghash-ce - implement 4-way aggregation
  crypto: arm64/ghash-ce - replace NEON yield check with block limit
  crypto: hisilicon - sec_send_request() can be static
  lib/mpi: remove redundant variable esign
  crypto: arm64/aes-ce-gcm - don't reload key schedule if avoidable
  crypto: arm64/aes-ce-gcm - implement 2-way aggregation
  crypto: arm64/aes-ce-gcm - operate on two input blocks at a time
  crypto: dh - make crypto_dh_encode_key() make robust
  crypto: dh - fix calculating encoded key size
  crypto: ccp - Check for NULL PSP pointer at module unload
  crypto: arm/chacha20 - always use vrev for 16-bit rotates
  crypto: ccree - allow bigger than sector XTS op
  crypto: ccree - zero all of request ctx before use
  crypto: ccree - remove cipher ivgen left overs
  crypto: ccree - drop useless type flag during reg
  crypto: ablkcipher - fix crash flushing dcache in error path
  crypto: blkcipher - fix crash flushing dcache in error path
  crypto: skcipher - fix crash flushing dcache in error path
  crypto: skcipher - remove unnecessary setting of walk->nbytes
  crypto: scatterwalk - remove scatterwalk_samebuf()
  ...
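The DH public key verification mentioned above relies on struct dh and crypto_dh_encode_key() now carrying the optional subgroup order Q alongside P and G (see the crypto/dh.c and crypto/dh_helper.c hunks below). As a rough caller-side sketch, assuming the struct dh fields and helpers shown in those hunks, the parameters would be packed like this; the example_encode_dh() wrapper itself is hypothetical and error handling is abbreviated:

#include <crypto/dh.h>
#include <linux/slab.h>

/*
 * Hypothetical wrapper, for illustration only: fill in a struct dh that
 * carries the optional Q value and serialize it with crypto_dh_encode_key().
 * q/q_size may be left zero, in which case only the partial (range) check
 * can be performed on the peer's public key.
 */
static int example_encode_dh(void *key, unsigned int key_size,
			     void *p, unsigned int p_size,
			     void *q, unsigned int q_size,
			     void *g, unsigned int g_size)
{
	struct dh params = {
		.key = key, .key_size = key_size,
		.p = p,     .p_size = p_size,
		.q = q,     .q_size = q_size,
		.g = g,     .g_size = g_size,
	};
	unsigned int len = crypto_dh_key_len(&params);
	char *buf = kmalloc(len, GFP_KERNEL);
	int err;

	if (!buf)
		return -ENOMEM;

	err = crypto_dh_encode_key(buf, len, &params);
	/* on success, buf/len would typically be passed to crypto_kpp_set_secret() */
	kzfree(buf);
	return err;
}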
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/ablkcipher.c | 59
-rw-r--r--  crypto/aegis128.c | 1
-rw-r--r--  crypto/aegis128l.c | 3
-rw-r--r--  crypto/aegis256.c | 1
-rw-r--r--  crypto/blkcipher.c | 55
-rw-r--r--  crypto/crypto_null.c | 1
-rw-r--r--  crypto/dh.c | 66
-rw-r--r--  crypto/dh_helper.c | 43
-rw-r--r--  crypto/drbg.c | 39
-rw-r--r--  crypto/ecc.c | 42
-rw-r--r--  crypto/ecc_curve_defs.h | 22
-rw-r--r--  crypto/ghash-generic.c | 1
-rw-r--r--  crypto/lrw.c | 4
-rw-r--r--  crypto/md4.c | 1
-rw-r--r--  crypto/md5.c | 1
-rw-r--r--  crypto/morus1280.c | 1
-rw-r--r--  crypto/morus640.c | 1
-rw-r--r--  crypto/poly1305_generic.c | 1
-rw-r--r--  crypto/rmd128.c | 1
-rw-r--r--  crypto/rmd160.c | 1
-rw-r--r--  crypto/rmd256.c | 11
-rw-r--r--  crypto/rmd320.c | 13
-rw-r--r--  crypto/scatterwalk.c | 2
-rw-r--r--  crypto/sha1_generic.c | 2
-rw-r--r--  crypto/sha256_generic.c | 4
-rw-r--r--  crypto/sha3_generic.c | 4
-rw-r--r--  crypto/sha512_generic.c | 26
-rw-r--r--  crypto/skcipher.c | 57
-rw-r--r--  crypto/sm3_generic.c | 1
-rw-r--r--  crypto/tcrypt.c | 38
-rw-r--r--  crypto/testmgr.c | 59
-rw-r--r--  crypto/testmgr.h | 233
-rw-r--r--  crypto/tgr192.c | 3
-rw-r--r--  crypto/vmac.c | 444
-rw-r--r--  crypto/wp512.c | 3
-rw-r--r--  crypto/xts.c | 4
36 files changed, 722 insertions(+), 526 deletions(-)
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index d880a4897159..8882e90e868e 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -71,11 +71,9 @@ static inline u8 *ablkcipher_get_spot(u8 *start, unsigned int len)
return max(start, end_page);
}
-static inline unsigned int ablkcipher_done_slow(struct ablkcipher_walk *walk,
- unsigned int bsize)
+static inline void ablkcipher_done_slow(struct ablkcipher_walk *walk,
+ unsigned int n)
{
- unsigned int n = bsize;
-
for (;;) {
unsigned int len_this_page = scatterwalk_pagelen(&walk->out);
@@ -87,17 +85,13 @@ static inline unsigned int ablkcipher_done_slow(struct ablkcipher_walk *walk,
n -= len_this_page;
scatterwalk_start(&walk->out, sg_next(walk->out.sg));
}
-
- return bsize;
}
-static inline unsigned int ablkcipher_done_fast(struct ablkcipher_walk *walk,
- unsigned int n)
+static inline void ablkcipher_done_fast(struct ablkcipher_walk *walk,
+ unsigned int n)
{
scatterwalk_advance(&walk->in, n);
scatterwalk_advance(&walk->out, n);
-
- return n;
}
static int ablkcipher_walk_next(struct ablkcipher_request *req,
@@ -107,39 +101,40 @@ int ablkcipher_walk_done(struct ablkcipher_request *req,
struct ablkcipher_walk *walk, int err)
{
struct crypto_tfm *tfm = req->base.tfm;
- unsigned int nbytes = 0;
+ unsigned int n; /* bytes processed */
+ bool more;
- if (likely(err >= 0)) {
- unsigned int n = walk->nbytes - err;
+ if (unlikely(err < 0))
+ goto finish;
- if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW)))
- n = ablkcipher_done_fast(walk, n);
- else if (WARN_ON(err)) {
- err = -EINVAL;
- goto err;
- } else
- n = ablkcipher_done_slow(walk, n);
+ n = walk->nbytes - err;
+ walk->total -= n;
+ more = (walk->total != 0);
- nbytes = walk->total - n;
- err = 0;
+ if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW))) {
+ ablkcipher_done_fast(walk, n);
+ } else {
+ if (WARN_ON(err)) {
+ /* unexpected case; didn't process all bytes */
+ err = -EINVAL;
+ goto finish;
+ }
+ ablkcipher_done_slow(walk, n);
}
- scatterwalk_done(&walk->in, 0, nbytes);
- scatterwalk_done(&walk->out, 1, nbytes);
-
-err:
- walk->total = nbytes;
- walk->nbytes = nbytes;
+ scatterwalk_done(&walk->in, 0, more);
+ scatterwalk_done(&walk->out, 1, more);
- if (nbytes) {
+ if (more) {
crypto_yield(req->base.flags);
return ablkcipher_walk_next(req, walk);
}
-
+ err = 0;
+finish:
+ walk->nbytes = 0;
if (walk->iv != req->info)
memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);
kfree(walk->iv_buffer);
-
return err;
}
EXPORT_SYMBOL_GPL(ablkcipher_walk_done);
@@ -373,6 +368,7 @@ static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
strncpy(rblkcipher.type, "ablkcipher", sizeof(rblkcipher.type));
strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<default>",
sizeof(rblkcipher.geniv));
+ rblkcipher.geniv[sizeof(rblkcipher.geniv) - 1] = '\0';
rblkcipher.blocksize = alg->cra_blocksize;
rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
@@ -447,6 +443,7 @@ static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
strncpy(rblkcipher.type, "givcipher", sizeof(rblkcipher.type));
strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<built-in>",
sizeof(rblkcipher.geniv));
+ rblkcipher.geniv[sizeof(rblkcipher.geniv) - 1] = '\0';
rblkcipher.blocksize = alg->cra_blocksize;
rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
diff --git a/crypto/aegis128.c b/crypto/aegis128.c
index 38271303ce16..c22f4414856d 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -429,7 +429,6 @@ static struct aead_alg crypto_aegis128_alg = {
.chunksize = AEGIS_BLOCK_SIZE,
.base = {
- .cra_flags = CRYPTO_ALG_TYPE_AEAD,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct aegis_ctx),
.cra_alignmask = 0,
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
index 0cc1a7525c85..b6fb21ebdc3e 100644
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -121,7 +121,7 @@ static void crypto_aegis128l_ad(struct aegis_state *state,
(const union aegis_chunk *)src;
while (size >= AEGIS128L_CHUNK_SIZE) {
- crypto_aegis128l_update_a(state, src_chunk);
+ crypto_aegis128l_update_a(state, src_chunk);
size -= AEGIS128L_CHUNK_SIZE;
src_chunk += 1;
@@ -493,7 +493,6 @@ static struct aead_alg crypto_aegis128l_alg = {
.chunksize = AEGIS128L_CHUNK_SIZE,
.base = {
- .cra_flags = CRYPTO_ALG_TYPE_AEAD,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct aegis_ctx),
.cra_alignmask = 0,
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
index a489d741d33a..11f0f8ec9c7c 100644
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -444,7 +444,6 @@ static struct aead_alg crypto_aegis256_alg = {
.chunksize = AEGIS_BLOCK_SIZE,
.base = {
- .cra_flags = CRYPTO_ALG_TYPE_AEAD,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct aegis_ctx),
.cra_alignmask = 0,
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index 01c0d4aa2563..f93abf13b5d4 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -70,19 +70,18 @@ static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
return max(start, end_page);
}
-static inline unsigned int blkcipher_done_slow(struct blkcipher_walk *walk,
- unsigned int bsize)
+static inline void blkcipher_done_slow(struct blkcipher_walk *walk,
+ unsigned int bsize)
{
u8 *addr;
addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
addr = blkcipher_get_spot(addr, bsize);
scatterwalk_copychunks(addr, &walk->out, bsize, 1);
- return bsize;
}
-static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
- unsigned int n)
+static inline void blkcipher_done_fast(struct blkcipher_walk *walk,
+ unsigned int n)
{
if (walk->flags & BLKCIPHER_WALK_COPY) {
blkcipher_map_dst(walk);
@@ -96,49 +95,48 @@ static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
scatterwalk_advance(&walk->in, n);
scatterwalk_advance(&walk->out, n);
-
- return n;
}
int blkcipher_walk_done(struct blkcipher_desc *desc,
struct blkcipher_walk *walk, int err)
{
- unsigned int nbytes = 0;
+ unsigned int n; /* bytes processed */
+ bool more;
- if (likely(err >= 0)) {
- unsigned int n = walk->nbytes - err;
+ if (unlikely(err < 0))
+ goto finish;
- if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
- n = blkcipher_done_fast(walk, n);
- else if (WARN_ON(err)) {
- err = -EINVAL;
- goto err;
- } else
- n = blkcipher_done_slow(walk, n);
+ n = walk->nbytes - err;
+ walk->total -= n;
+ more = (walk->total != 0);
- nbytes = walk->total - n;
- err = 0;
+ if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW))) {
+ blkcipher_done_fast(walk, n);
+ } else {
+ if (WARN_ON(err)) {
+ /* unexpected case; didn't process all bytes */
+ err = -EINVAL;
+ goto finish;
+ }
+ blkcipher_done_slow(walk, n);
}
- scatterwalk_done(&walk->in, 0, nbytes);
- scatterwalk_done(&walk->out, 1, nbytes);
+ scatterwalk_done(&walk->in, 0, more);
+ scatterwalk_done(&walk->out, 1, more);
-err:
- walk->total = nbytes;
- walk->nbytes = nbytes;
-
- if (nbytes) {
+ if (more) {
crypto_yield(desc->flags);
return blkcipher_walk_next(desc, walk);
}
-
+ err = 0;
+finish:
+ walk->nbytes = 0;
if (walk->iv != desc->info)
memcpy(desc->info, walk->iv, walk->ivsize);
if (walk->buffer != walk->page)
kfree(walk->buffer);
if (walk->page)
free_page((unsigned long)walk->page);
-
return err;
}
EXPORT_SYMBOL_GPL(blkcipher_walk_done);
@@ -512,6 +510,7 @@ static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
strncpy(rblkcipher.type, "blkcipher", sizeof(rblkcipher.type));
strncpy(rblkcipher.geniv, alg->cra_blkcipher.geniv ?: "<default>",
sizeof(rblkcipher.geniv));
+ rblkcipher.geniv[sizeof(rblkcipher.geniv) - 1] = '\0';
rblkcipher.blocksize = alg->cra_blocksize;
rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 20ff2c746e0b..0959b268966c 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -104,7 +104,6 @@ static struct shash_alg digest_null = {
.final = null_final,
.base = {
.cra_name = "digest_null",
- .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.cra_blocksize = NULL_BLOCK_SIZE,
.cra_module = THIS_MODULE,
}
diff --git a/crypto/dh.c b/crypto/dh.c
index 5659fe7f446d..09a44de4209d 100644
--- a/crypto/dh.c
+++ b/crypto/dh.c
@@ -16,14 +16,16 @@
#include <linux/mpi.h>
struct dh_ctx {
- MPI p;
- MPI g;
- MPI xa;
+ MPI p; /* Value is guaranteed to be set. */
+ MPI q; /* Value is optional. */
+ MPI g; /* Value is guaranteed to be set. */
+ MPI xa; /* Value is guaranteed to be set. */
};
static void dh_clear_ctx(struct dh_ctx *ctx)
{
mpi_free(ctx->p);
+ mpi_free(ctx->q);
mpi_free(ctx->g);
mpi_free(ctx->xa);
memset(ctx, 0, sizeof(*ctx));
@@ -60,6 +62,12 @@ static int dh_set_params(struct dh_ctx *ctx, struct dh *params)
if (!ctx->p)
return -EINVAL;
+ if (params->q && params->q_size) {
+ ctx->q = mpi_read_raw_data(params->q, params->q_size);
+ if (!ctx->q)
+ return -EINVAL;
+ }
+
ctx->g = mpi_read_raw_data(params->g, params->g_size);
if (!ctx->g)
return -EINVAL;
@@ -93,6 +101,55 @@ err_clear_ctx:
return -EINVAL;
}
+/*
+ * SP800-56A public key verification:
+ *
+ * * If Q is provided as part of the domain parameters, a full validation
+ * according to SP800-56A section 5.6.2.3.1 is performed.
+ *
+ * * If Q is not provided, a partial validation according to SP800-56A section
+ * 5.6.2.3.2 is performed.
+ */
+static int dh_is_pubkey_valid(struct dh_ctx *ctx, MPI y)
+{
+ if (unlikely(!ctx->p))
+ return -EINVAL;
+
+ /*
+ * Step 1: Verify that 2 <= y <= p - 2.
+ *
+ * The upper limit check is actually y < p instead of y < p - 1
+ * as the mpi_sub_ui function is yet missing.
+ */
+ if (mpi_cmp_ui(y, 1) < 1 || mpi_cmp(y, ctx->p) >= 0)
+ return -EINVAL;
+
+ /* Step 2: Verify that 1 = y^q mod p */
+ if (ctx->q) {
+ MPI val = mpi_alloc(0);
+ int ret;
+
+ if (!val)
+ return -ENOMEM;
+
+ ret = mpi_powm(val, y, ctx->q, ctx->p);
+
+ if (ret) {
+ mpi_free(val);
+ return ret;
+ }
+
+ ret = mpi_cmp_ui(val, 1);
+
+ mpi_free(val);
+
+ if (ret != 0)
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
static int dh_compute_value(struct kpp_request *req)
{
struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
@@ -115,6 +172,9 @@ static int dh_compute_value(struct kpp_request *req)
ret = -EINVAL;
goto err_free_val;
}
+ ret = dh_is_pubkey_valid(ctx, base);
+ if (ret)
+ goto err_free_base;
} else {
base = ctx->g;
}
diff --git a/crypto/dh_helper.c b/crypto/dh_helper.c
index 24fdb2ecaa85..edacda5f6a4d 100644
--- a/crypto/dh_helper.c
+++ b/crypto/dh_helper.c
@@ -14,10 +14,12 @@
#include <crypto/dh.h>
#include <crypto/kpp.h>
-#define DH_KPP_SECRET_MIN_SIZE (sizeof(struct kpp_secret) + 3 * sizeof(int))
+#define DH_KPP_SECRET_MIN_SIZE (sizeof(struct kpp_secret) + 4 * sizeof(int))
-static inline u8 *dh_pack_data(void *dst, const void *src, size_t size)
+static inline u8 *dh_pack_data(u8 *dst, u8 *end, const void *src, size_t size)
{
+ if (!dst || size > end - dst)
+ return NULL;
memcpy(dst, src, size);
return dst + size;
}
@@ -30,7 +32,7 @@ static inline const u8 *dh_unpack_data(void *dst, const void *src, size_t size)
static inline unsigned int dh_data_size(const struct dh *p)
{
- return p->key_size + p->p_size + p->g_size;
+ return p->key_size + p->p_size + p->q_size + p->g_size;
}
unsigned int crypto_dh_key_len(const struct dh *p)
@@ -42,25 +44,27 @@ EXPORT_SYMBOL_GPL(crypto_dh_key_len);
int crypto_dh_encode_key(char *buf, unsigned int len, const struct dh *params)
{
u8 *ptr = buf;
+ u8 * const end = ptr + len;
struct kpp_secret secret = {
.type = CRYPTO_KPP_SECRET_TYPE_DH,
.len = len
};
- if (unlikely(!buf))
+ if (unlikely(!len))
return -EINVAL;
- if (len != crypto_dh_key_len(params))
+ ptr = dh_pack_data(ptr, end, &secret, sizeof(secret));
+ ptr = dh_pack_data(ptr, end, &params->key_size,
+ sizeof(params->key_size));
+ ptr = dh_pack_data(ptr, end, &params->p_size, sizeof(params->p_size));
+ ptr = dh_pack_data(ptr, end, &params->q_size, sizeof(params->q_size));
+ ptr = dh_pack_data(ptr, end, &params->g_size, sizeof(params->g_size));
+ ptr = dh_pack_data(ptr, end, params->key, params->key_size);
+ ptr = dh_pack_data(ptr, end, params->p, params->p_size);
+ ptr = dh_pack_data(ptr, end, params->q, params->q_size);
+ ptr = dh_pack_data(ptr, end, params->g, params->g_size);
+ if (ptr != end)
return -EINVAL;
-
- ptr = dh_pack_data(ptr, &secret, sizeof(secret));
- ptr = dh_pack_data(ptr, &params->key_size, sizeof(params->key_size));
- ptr = dh_pack_data(ptr, &params->p_size, sizeof(params->p_size));
- ptr = dh_pack_data(ptr, &params->g_size, sizeof(params->g_size));
- ptr = dh_pack_data(ptr, params->key, params->key_size);
- ptr = dh_pack_data(ptr, params->p, params->p_size);
- dh_pack_data(ptr, params->g, params->g_size);
-
return 0;
}
EXPORT_SYMBOL_GPL(crypto_dh_encode_key);
@@ -79,6 +83,7 @@ int crypto_dh_decode_key(const char *buf, unsigned int len, struct dh *params)
ptr = dh_unpack_data(&params->key_size, ptr, sizeof(params->key_size));
ptr = dh_unpack_data(&params->p_size, ptr, sizeof(params->p_size));
+ ptr = dh_unpack_data(&params->q_size, ptr, sizeof(params->q_size));
ptr = dh_unpack_data(&params->g_size, ptr, sizeof(params->g_size));
if (secret.len != crypto_dh_key_len(params))
return -EINVAL;
@@ -88,7 +93,7 @@ int crypto_dh_decode_key(const char *buf, unsigned int len, struct dh *params)
* some drivers assume otherwise.
*/
if (params->key_size > params->p_size ||
- params->g_size > params->p_size)
+ params->g_size > params->p_size || params->q_size > params->p_size)
return -EINVAL;
/* Don't allocate memory. Set pointers to data within
@@ -96,7 +101,9 @@ int crypto_dh_decode_key(const char *buf, unsigned int len, struct dh *params)
*/
params->key = (void *)ptr;
params->p = (void *)(ptr + params->key_size);
- params->g = (void *)(ptr + params->key_size + params->p_size);
+ params->q = (void *)(ptr + params->key_size + params->p_size);
+ params->g = (void *)(ptr + params->key_size + params->p_size +
+ params->q_size);
/*
* Don't permit 'p' to be 0. It's not a prime number, and it's subject
@@ -106,6 +113,10 @@ int crypto_dh_decode_key(const char *buf, unsigned int len, struct dh *params)
if (memchr_inv(params->p, 0, params->p_size) == NULL)
return -EINVAL;
+ /* It is permissible to not provide Q. */
+ if (params->q_size == 0)
+ params->q = NULL;
+
return 0;
}
EXPORT_SYMBOL_GPL(crypto_dh_decode_key);
diff --git a/crypto/drbg.c b/crypto/drbg.c
index 466a112a4446..bc52d9562611 100644
--- a/crypto/drbg.c
+++ b/crypto/drbg.c
@@ -261,8 +261,7 @@ static int drbg_fini_sym_kernel(struct drbg_state *drbg);
static int drbg_kcapi_sym_ctr(struct drbg_state *drbg,
u8 *inbuf, u32 inbuflen,
u8 *outbuf, u32 outlen);
-#define DRBG_CTR_NULL_LEN 128
-#define DRBG_OUTSCRATCHLEN DRBG_CTR_NULL_LEN
+#define DRBG_OUTSCRATCHLEN 256
/* BCC function for CTR DRBG as defined in 10.4.3 */
static int drbg_ctr_bcc(struct drbg_state *drbg,
@@ -555,8 +554,7 @@ static int drbg_ctr_generate(struct drbg_state *drbg,
}
/* 10.2.1.5.2 step 4.1 */
- ret = drbg_kcapi_sym_ctr(drbg, drbg->ctr_null_value, DRBG_CTR_NULL_LEN,
- buf, len);
+ ret = drbg_kcapi_sym_ctr(drbg, NULL, 0, buf, len);
if (ret)
return ret;
@@ -1644,9 +1642,6 @@ static int drbg_fini_sym_kernel(struct drbg_state *drbg)
skcipher_request_free(drbg->ctr_req);
drbg->ctr_req = NULL;
- kfree(drbg->ctr_null_value_buf);
- drbg->ctr_null_value = NULL;
-
kfree(drbg->outscratchpadbuf);
drbg->outscratchpadbuf = NULL;
@@ -1697,15 +1692,6 @@ static int drbg_init_sym_kernel(struct drbg_state *drbg)
crypto_req_done, &drbg->ctr_wait);
alignmask = crypto_skcipher_alignmask(sk_tfm);
- drbg->ctr_null_value_buf = kzalloc(DRBG_CTR_NULL_LEN + alignmask,
- GFP_KERNEL);
- if (!drbg->ctr_null_value_buf) {
- drbg_fini_sym_kernel(drbg);
- return -ENOMEM;
- }
- drbg->ctr_null_value = (u8 *)PTR_ALIGN(drbg->ctr_null_value_buf,
- alignmask + 1);
-
drbg->outscratchpadbuf = kmalloc(DRBG_OUTSCRATCHLEN + alignmask,
GFP_KERNEL);
if (!drbg->outscratchpadbuf) {
@@ -1715,6 +1701,9 @@ static int drbg_init_sym_kernel(struct drbg_state *drbg)
drbg->outscratchpad = (u8 *)PTR_ALIGN(drbg->outscratchpadbuf,
alignmask + 1);
+ sg_init_table(&drbg->sg_in, 1);
+ sg_init_one(&drbg->sg_out, drbg->outscratchpad, DRBG_OUTSCRATCHLEN);
+
return alignmask;
}
@@ -1743,17 +1732,25 @@ static int drbg_kcapi_sym_ctr(struct drbg_state *drbg,
u8 *inbuf, u32 inlen,
u8 *outbuf, u32 outlen)
{
- struct scatterlist sg_in, sg_out;
+ struct scatterlist *sg_in = &drbg->sg_in, *sg_out = &drbg->sg_out;
+ u32 scratchpad_use = min_t(u32, outlen, DRBG_OUTSCRATCHLEN);
int ret;
- sg_init_one(&sg_in, inbuf, inlen);
- sg_init_one(&sg_out, drbg->outscratchpad, DRBG_OUTSCRATCHLEN);
+ if (inbuf) {
+ /* Use caller-provided input buffer */
+ sg_set_buf(sg_in, inbuf, inlen);
+ } else {
+ /* Use scratchpad for in-place operation */
+ inlen = scratchpad_use;
+ memset(drbg->outscratchpad, 0, scratchpad_use);
+ sg_set_buf(sg_in, drbg->outscratchpad, scratchpad_use);
+ }
while (outlen) {
u32 cryptlen = min3(inlen, outlen, (u32)DRBG_OUTSCRATCHLEN);
/* Output buffer may not be valid for SGL, use scratchpad */
- skcipher_request_set_crypt(drbg->ctr_req, &sg_in, &sg_out,
+ skcipher_request_set_crypt(drbg->ctr_req, sg_in, sg_out,
cryptlen, drbg->V);
ret = crypto_wait_req(crypto_skcipher_encrypt(drbg->ctr_req),
&drbg->ctr_wait);
@@ -1763,6 +1760,7 @@ static int drbg_kcapi_sym_ctr(struct drbg_state *drbg,
crypto_init_wait(&drbg->ctr_wait);
memcpy(outbuf, drbg->outscratchpad, cryptlen);
+ memzero_explicit(drbg->outscratchpad, cryptlen);
outlen -= cryptlen;
outbuf += cryptlen;
@@ -1770,7 +1768,6 @@ static int drbg_kcapi_sym_ctr(struct drbg_state *drbg,
ret = 0;
out:
- memzero_explicit(drbg->outscratchpad, DRBG_OUTSCRATCHLEN);
return ret;
}
#endif /* CONFIG_CRYPTO_DRBG_CTR */
diff --git a/crypto/ecc.c b/crypto/ecc.c
index 815541309a95..8facafd67802 100644
--- a/crypto/ecc.c
+++ b/crypto/ecc.c
@@ -1019,6 +1019,36 @@ out:
return ret;
}
+/* SP800-56A section 5.6.2.3.4 partial verification: ephemeral keys only */
+static int ecc_is_pubkey_valid_partial(const struct ecc_curve *curve,
+ struct ecc_point *pk)
+{
+ u64 yy[ECC_MAX_DIGITS], xxx[ECC_MAX_DIGITS], w[ECC_MAX_DIGITS];
+
+ /* Check 1: Verify key is not the zero point. */
+ if (ecc_point_is_zero(pk))
+ return -EINVAL;
+
+ /* Check 2: Verify key is in the range [1, p-1]. */
+ if (vli_cmp(curve->p, pk->x, pk->ndigits) != 1)
+ return -EINVAL;
+ if (vli_cmp(curve->p, pk->y, pk->ndigits) != 1)
+ return -EINVAL;
+
+ /* Check 3: Verify that y^2 == (x^3 + a·x + b) mod p */
+ vli_mod_square_fast(yy, pk->y, curve->p, pk->ndigits); /* y^2 */
+ vli_mod_square_fast(xxx, pk->x, curve->p, pk->ndigits); /* x^2 */
+ vli_mod_mult_fast(xxx, xxx, pk->x, curve->p, pk->ndigits); /* x^3 */
+ vli_mod_mult_fast(w, curve->a, pk->x, curve->p, pk->ndigits); /* a·x */
+ vli_mod_add(w, w, curve->b, curve->p, pk->ndigits); /* a·x + b */
+ vli_mod_add(w, w, xxx, curve->p, pk->ndigits); /* x^3 + a·x + b */
+ if (vli_cmp(yy, w, pk->ndigits) != 0) /* Equation */
+ return -EINVAL;
+
+ return 0;
+
+}
+
int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
const u64 *private_key, const u64 *public_key,
u64 *secret)
@@ -1046,16 +1076,20 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
goto out;
}
+ ecc_swap_digits(public_key, pk->x, ndigits);
+ ecc_swap_digits(&public_key[ndigits], pk->y, ndigits);
+ ret = ecc_is_pubkey_valid_partial(curve, pk);
+ if (ret)
+ goto err_alloc_product;
+
+ ecc_swap_digits(private_key, priv, ndigits);
+
product = ecc_alloc_point(ndigits);
if (!product) {
ret = -ENOMEM;
goto err_alloc_product;
}
- ecc_swap_digits(public_key, pk->x, ndigits);
- ecc_swap_digits(&public_key[ndigits], pk->y, ndigits);
- ecc_swap_digits(private_key, priv, ndigits);
-
ecc_point_mult(product, pk, priv, rand_z, curve->p, ndigits);
ecc_swap_digits(product->x, secret, ndigits);
diff --git a/crypto/ecc_curve_defs.h b/crypto/ecc_curve_defs.h
index b80f45da829c..336ab1805639 100644
--- a/crypto/ecc_curve_defs.h
+++ b/crypto/ecc_curve_defs.h
@@ -13,9 +13,11 @@ struct ecc_curve {
struct ecc_point g;
u64 *p;
u64 *n;
+ u64 *a;
+ u64 *b;
};
-/* NIST P-192 */
+/* NIST P-192: a = p - 3 */
static u64 nist_p192_g_x[] = { 0xF4FF0AFD82FF1012ull, 0x7CBF20EB43A18800ull,
0x188DA80EB03090F6ull };
static u64 nist_p192_g_y[] = { 0x73F977A11E794811ull, 0x631011ED6B24CDD5ull,
@@ -24,6 +26,10 @@ static u64 nist_p192_p[] = { 0xFFFFFFFFFFFFFFFFull, 0xFFFFFFFFFFFFFFFEull,
0xFFFFFFFFFFFFFFFFull };
static u64 nist_p192_n[] = { 0x146BC9B1B4D22831ull, 0xFFFFFFFF99DEF836ull,
0xFFFFFFFFFFFFFFFFull };
+static u64 nist_p192_a[] = { 0xFFFFFFFFFFFFFFFCull, 0xFFFFFFFFFFFFFFFEull,
+ 0xFFFFFFFFFFFFFFFFull };
+static u64 nist_p192_b[] = { 0xFEB8DEECC146B9B1ull, 0x0FA7E9AB72243049ull,
+ 0x64210519E59C80E7ull };
static struct ecc_curve nist_p192 = {
.name = "nist_192",
.g = {
@@ -32,10 +38,12 @@ static struct ecc_curve nist_p192 = {
.ndigits = 3,
},
.p = nist_p192_p,
- .n = nist_p192_n
+ .n = nist_p192_n,
+ .a = nist_p192_a,
+ .b = nist_p192_b
};
-/* NIST P-256 */
+/* NIST P-256: a = p - 3 */
static u64 nist_p256_g_x[] = { 0xF4A13945D898C296ull, 0x77037D812DEB33A0ull,
0xF8BCE6E563A440F2ull, 0x6B17D1F2E12C4247ull };
static u64 nist_p256_g_y[] = { 0xCBB6406837BF51F5ull, 0x2BCE33576B315ECEull,
@@ -44,6 +52,10 @@ static u64 nist_p256_p[] = { 0xFFFFFFFFFFFFFFFFull, 0x00000000FFFFFFFFull,
0x0000000000000000ull, 0xFFFFFFFF00000001ull };
static u64 nist_p256_n[] = { 0xF3B9CAC2FC632551ull, 0xBCE6FAADA7179E84ull,
0xFFFFFFFFFFFFFFFFull, 0xFFFFFFFF00000000ull };
+static u64 nist_p256_a[] = { 0xFFFFFFFFFFFFFFFCull, 0x00000000FFFFFFFFull,
+ 0x0000000000000000ull, 0xFFFFFFFF00000001ull };
+static u64 nist_p256_b[] = { 0x3BCE3C3E27D2604Bull, 0x651D06B0CC53B0F6ull,
+ 0xB3EBBD55769886BCull, 0x5AC635D8AA3A93E7ull };
static struct ecc_curve nist_p256 = {
.name = "nist_256",
.g = {
@@ -52,7 +64,9 @@ static struct ecc_curve nist_p256 = {
.ndigits = 4,
},
.p = nist_p256_p,
- .n = nist_p256_n
+ .n = nist_p256_n,
+ .a = nist_p256_a,
+ .b = nist_p256_b
};
#endif
diff --git a/crypto/ghash-generic.c b/crypto/ghash-generic.c
index 1bffb3f712dd..d9f192b953b2 100644
--- a/crypto/ghash-generic.c
+++ b/crypto/ghash-generic.c
@@ -132,7 +132,6 @@ static struct shash_alg ghash_alg = {