author    David Benjamin <davidben@google.com>       2019-01-29 04:39:17 +0000
committer Bernd Edlinger <bernd.edlinger@hotmail.de> 2019-04-14 12:55:53 +0200
commit    15972296af6b98ae495ada9d4695f2a0e71f891c (patch)
tree      086f1957ddc2d527c43de5fe8670fbae23821dfa
parent    3051bf2afab7ac8b7b9c64e68755d1addd2fb8ff (diff)
Fix calling convention bug in ecp_nistz256_ord_sqr_mont
The rep parameter takes an int in C, but the assembly implementation looks at the upper bits. While it's unlikely to happen here, where all calls pass a constant, in other scenarios x86_64 compilers will leave arbitrary values in the upper half. Fix this by making the C prototype match the assembly.

(This aspect of the calling convention implies smaller-than-word arguments in assembly functions should be avoided. There are far fewer things to test if everything consistently takes word-sized arguments.)

This was found as part of ABI testing work in BoringSSL.

Reviewed-by: Paul Dale <paul.dale@oracle.com>
Reviewed-by: Bernd Edlinger <bernd.edlinger@hotmail.de>
(Merged from https://github.com/openssl/openssl/pull/8108)
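As background, the sketch below (not part of this patch, and not OpenSSL code; the function names are hypothetical) illustrates the mismatch under the x86-64 SysV calling convention: the upper 32 bits of the register carrying an `int` argument are not guaranteed to be zeroed by the caller, so assembly that consumes the full 64-bit register as its repetition counter can observe garbage there.

```c
/*
 * Minimal illustrative sketch, not OpenSSL code; names are hypothetical.
 * Under the x86-64 SysV ABI the third integer argument arrives in %rdx,
 * and when it is declared as a 32-bit `int` the upper 32 bits of that
 * register are unspecified. Assembly that uses the whole of %rdx as a
 * loop counter may therefore iterate on bits the C caller never set.
 */
#include <stdint.h>

/* Before the patch: the C prototype only promises the low 32 bits. */
void sqr_mont_loop_old(uint64_t res[4], const uint64_t a[4], int rep);

/* After the patch: a word-sized argument, so the entire register the
 * assembly reads is defined by the caller. */
void sqr_mont_loop_new(uint64_t res[4], const uint64_t a[4], uint64_t rep);
```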
-rw-r--r--  crypto/ec/asm/ecp_nistz256-armv8.pl   2
-rwxr-xr-x  crypto/ec/asm/ecp_nistz256-ppc64.pl   2
-rwxr-xr-x  crypto/ec/asm/ecp_nistz256-x86_64.pl  2
-rw-r--r--  crypto/ec/ecp_nistz256.c              2
4 files changed, 4 insertions, 4 deletions
diff --git a/crypto/ec/asm/ecp_nistz256-armv8.pl b/crypto/ec/asm/ecp_nistz256-armv8.pl
index 8914f1a619..4daa8cc026 100644
--- a/crypto/ec/asm/ecp_nistz256-armv8.pl
+++ b/crypto/ec/asm/ecp_nistz256-armv8.pl
@@ -1488,7 +1488,7 @@ $code.=<<___;
////////////////////////////////////////////////////////////////////////
// void ecp_nistz256_ord_sqr_mont(uint64_t res[4], uint64_t a[4],
-// int rep);
+// uint64_t rep);
.globl ecp_nistz256_ord_sqr_mont
.type ecp_nistz256_ord_sqr_mont,%function
.align 4
diff --git a/crypto/ec/asm/ecp_nistz256-ppc64.pl b/crypto/ec/asm/ecp_nistz256-ppc64.pl
index b1cd190c15..c06a7c0d02 100755
--- a/crypto/ec/asm/ecp_nistz256-ppc64.pl
+++ b/crypto/ec/asm/ecp_nistz256-ppc64.pl
@@ -1919,7 +1919,7 @@ $code.=<<___;
################################################################################
# void ecp_nistz256_ord_sqr_mont(uint64_t res[4], uint64_t a[4],
-# int rep);
+# uint64_t rep);
.globl ecp_nistz256_ord_sqr_mont
.align 5
ecp_nistz256_ord_sqr_mont:
diff --git a/crypto/ec/asm/ecp_nistz256-x86_64.pl b/crypto/ec/asm/ecp_nistz256-x86_64.pl
index a28ee8e947..e1e23ca90a 100755
--- a/crypto/ec/asm/ecp_nistz256-x86_64.pl
+++ b/crypto/ec/asm/ecp_nistz256-x86_64.pl
@@ -826,7 +826,7 @@ $code.=<<___;
# void ecp_nistz256_ord_sqr_mont(
# uint64_t res[4],
# uint64_t a[4],
-# int rep);
+# uint64_t rep);
.globl ecp_nistz256_ord_sqr_mont
.type ecp_nistz256_ord_sqr_mont,\@function,3
diff --git a/crypto/ec/ecp_nistz256.c b/crypto/ec/ecp_nistz256.c
index 6a64bc4f2a..66bf4ecb1e 100644
--- a/crypto/ec/ecp_nistz256.c
+++ b/crypto/ec/ecp_nistz256.c
@@ -1467,7 +1467,7 @@ void ecp_nistz256_ord_mul_mont(BN_ULONG res[P256_LIMBS],
const BN_ULONG b[P256_LIMBS]);
void ecp_nistz256_ord_sqr_mont(BN_ULONG res[P256_LIMBS],
const BN_ULONG a[P256_LIMBS],
- int rep);
+ BN_ULONG rep);
static int ecp_nistz256_inv_mod_ord(const EC_GROUP *group, BIGNUM *r,
const BIGNUM *x, BN_CTX *ctx)