diff options
author | Daniel Hu <Daniel.Hu@arm.com> | 2022-02-14 14:36:34 +0000 |
---|---|---|
committer | Tomas Mraz <tomas@openssl.org> | 2022-04-12 10:37:42 +0200 |
commit | 4908787f21f4f5fa24b721ed3ebbc4d3e93ef70c (patch) | |
tree | ff233074dbc689698d6c458f9475edca6cdec723 /providers | |
parent | 40fb5a4ce3e90c9e8702aad0fcf43eb9f6edf419 (diff) |
SM4 optimization for ARM by ASIMD
This patch optimizes SM4 for ARM processors using ASIMD instructions.
It will improve performance if both of following conditions are met:
1) Input data equal to or more than 4 blocks
2) Cipher mode allows parallelism, including ECB, CTR, GCM, or CBC decryption
This patch implements SM4 SBOX lookup in vector registers, with the
benefit of constant processing time over existing C implementation.
It is only enabled for micro-architecture N1/V1. In the ideal scenario,
performance can reach up to 2.7X
When either of the above two conditions is not met — e.g. single-block input,
or CFB/OFB mode, or CBC encryption — performance could drop about 50%.
The assembly code has been reviewed internally by ARM engineer
Fangming.Fang@arm.com
Signed-off-by: Daniel Hu <Daniel.Hu@arm.com>
Reviewed-by: Paul Dale <pauli@openssl.org>
Reviewed-by: Tomas Mraz <tomas@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/17951)
Diffstat (limited to 'providers')
-rw-r--r-- | providers/implementations/ciphers/cipher_sm4_gcm_hw.c | 7 | ||||
-rw-r--r-- | providers/implementations/ciphers/cipher_sm4_hw.c | 24 |
2 files changed, 31 insertions, 0 deletions
diff --git a/providers/implementations/ciphers/cipher_sm4_gcm_hw.c b/providers/implementations/ciphers/cipher_sm4_gcm_hw.c
index b9633f83ed..db7fe0fe2f 100644
--- a/providers/implementations/ciphers/cipher_sm4_gcm_hw.c
+++ b/providers/implementations/ciphers/cipher_sm4_gcm_hw.c
@@ -32,6 +32,13 @@ static int sm4_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
 # endif
     } else
 # endif /* HWSM4_CAPABLE */
+# ifdef VPSM4_CAPABLE
+    if (VPSM4_CAPABLE) {
+        vpsm4_set_encrypt_key(key, ks);
+        CRYPTO_gcm128_init(&ctx->gcm, ks, (block128_f) vpsm4_encrypt);
+        ctx->ctr = (ctr128_f) vpsm4_ctr32_encrypt_blocks;
+    } else
+# endif /* VPSM4_CAPABLE */
     {
         ossl_sm4_set_key(key, ks);
         CRYPTO_gcm128_init(&ctx->gcm, ks, (block128_f)ossl_sm4_encrypt);
diff --git a/providers/implementations/ciphers/cipher_sm4_hw.c b/providers/implementations/ciphers/cipher_sm4_hw.c
index 4cd3d3d669..9a2e99f67c 100644
--- a/providers/implementations/ciphers/cipher_sm4_hw.c
+++ b/providers/implementations/ciphers/cipher_sm4_hw.c
@@ -42,6 +42,19 @@ static int cipher_hw_sm4_initkey(PROV_CIPHER_CTX *ctx,
             (void)0; /* terminate potentially open 'else' */
         } else
 #endif
+#ifdef VPSM4_CAPABLE
+        if (VPSM4_CAPABLE) {
+            vpsm4_set_encrypt_key(key, ks);
+            ctx->block = (block128_f)vpsm4_encrypt;
+            ctx->stream.cbc = NULL;
+            if (ctx->mode == EVP_CIPH_CBC_MODE)
+                ctx->stream.cbc = (cbc128_f)vpsm4_cbc_encrypt;
+            else if (ctx->mode == EVP_CIPH_ECB_MODE)
+                ctx->stream.ecb = (ecb128_f)vpsm4_ecb_encrypt;
+            else if (ctx->mode == EVP_CIPH_CTR_MODE)
+                ctx->stream.ctr = (ctr128_f)vpsm4_ctr32_encrypt_blocks;
+        } else
+#endif
         {
             ossl_sm4_set_key(key, ks);
             ctx->block = (block128_f)ossl_sm4_encrypt;
@@ -62,6 +75,17 @@ static int cipher_hw_sm4_initkey(PROV_CIPHER_CTX *ctx,
 #endif
         } else
 #endif
+#ifdef VPSM4_CAPABLE
+        if (VPSM4_CAPABLE) {
+            vpsm4_set_decrypt_key(key, ks);
+            ctx->block = (block128_f)vpsm4_decrypt;
+            ctx->stream.cbc = NULL;
+            if (ctx->mode == EVP_CIPH_CBC_MODE)
+                ctx->stream.cbc = (cbc128_f)vpsm4_cbc_encrypt;
+            else if (ctx->mode == EVP_CIPH_ECB_MODE)
+                ctx->stream.ecb = (ecb128_f)vpsm4_ecb_encrypt;
+        } else
+#endif
         {
             ossl_sm4_set_key(key, ks);
             ctx->block = (block128_f)ossl_sm4_decrypt;