diff options
author:    H.J. Lu <hongjiu.lu@intel.com>          2020-01-31 04:17:26 -0800
committer: Richard Levitte <levitte@openssl.org>   2020-02-15 22:15:03 +0100
commit:    98ad3fe82bd3e7e7f929dd1fa4ef3915426002c0 (patch)
tree:      90305d8bdb22f19188d925136cdd25b2d13f9b03 /crypto/aes
parent:    07980622e28746245a83ad9d011b6a4a32a1c2e0 (diff)
x86_64: Add endbranch at function entries for Intel CET
To support Intel CET, all indirect branch targets must start with
endbranch. Here is a patch to add endbranch to function entries
in x86_64 assembly codes which are indirect branch targets as
discovered by running openssl testsuite on Intel CET machine and
visual inspection.
Verified with
$ CC="gcc -Wl,-z,cet-report=error" ./Configure shared linux-x86_64 -fcf-protection
$ make
$ make test
and
$ CC="gcc -mx32 -Wl,-z,cet-report=error" ./Configure shared linux-x32 -fcf-protection
$ make
$ make test # <<< passed with https://github.com/openssl/openssl/pull/10988
Reviewed-by: Tomas Mraz <tmraz@fedoraproject.org>
Reviewed-by: Richard Levitte <levitte@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/10982)
Diffstat (limited to 'crypto/aes')
-rwxr-xr-x  crypto/aes/asm/aes-x86_64.pl     5
-rw-r--r--  crypto/aes/asm/aesni-x86_64.pl  11
-rw-r--r--  crypto/aes/asm/bsaes-x86_64.pl   2
-rw-r--r--  crypto/aes/asm/vpaes-x86_64.pl   5
4 files changed, 23 insertions, 0 deletions
diff --git a/crypto/aes/asm/aes-x86_64.pl b/crypto/aes/asm/aes-x86_64.pl
index 813817ed46..4e417a516b 100755
--- a/crypto/aes/asm/aes-x86_64.pl
+++ b/crypto/aes/asm/aes-x86_64.pl
@@ -606,6 +606,7 @@ $code.=<<___;
 asm_AES_encrypt:
 AES_encrypt:
 .cfi_startproc
+	endbranch
 	mov	%rsp,%rax
 .cfi_def_cfa_register	%rax
 	push	%rbx
@@ -1226,6 +1227,7 @@ $code.=<<___;
 asm_AES_decrypt:
 AES_decrypt:
 .cfi_startproc
+	endbranch
 	mov	%rsp,%rax
 .cfi_def_cfa_register	%rax
 	push	%rbx
@@ -1343,6 +1345,7 @@ $code.=<<___;
 .align	16
 AES_set_encrypt_key:
 .cfi_startproc
+	endbranch
 	push	%rbx
 .cfi_push	%rbx
 	push	%rbp
@@ -1623,6 +1626,7 @@ $code.=<<___;
 .align	16
 AES_set_decrypt_key:
 .cfi_startproc
+	endbranch
 	push	%rbx
 .cfi_push	%rbx
 	push	%rbp
@@ -1737,6 +1741,7 @@ $code.=<<___;
 asm_AES_cbc_encrypt:
 AES_cbc_encrypt:
 .cfi_startproc
+	endbranch
 	cmp	\$0,%rdx	# check length
 	je	.Lcbc_epilogue
 	pushfq
diff --git a/crypto/aes/asm/aesni-x86_64.pl b/crypto/aes/asm/aesni-x86_64.pl
index 79d50da713..7b2ad2866b 100644
--- a/crypto/aes/asm/aesni-x86_64.pl
+++ b/crypto/aes/asm/aesni-x86_64.pl
@@ -277,6 +277,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_encrypt:
 .cfi_startproc
+	endbranch
 	movups	($inp),$inout0		# load input
 	mov	240($key),$rounds	# key->rounds
 ___
@@ -295,6 +296,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_decrypt:
 .cfi_startproc
+	endbranch
 	movups	($inp),$inout0		# load input
 	mov	240($key),$rounds	# key->rounds
 ___
@@ -615,6 +617,7 @@ $code.=<<___;
 .align	16
 aesni_ecb_encrypt:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0x58(%rsp),%rsp
@@ -987,6 +990,7 @@ $code.=<<___;
 .align	16
 aesni_ccm64_encrypt_blocks:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0x58(%rsp),%rsp
@@ -1079,6 +1083,7 @@ $code.=<<___;
 .align	16
 aesni_ccm64_decrypt_blocks:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0x58(%rsp),%rsp
@@ -1205,6 +1210,7 @@ $code.=<<___;
 .align	16
 aesni_ctr32_encrypt_blocks:
 .cfi_startproc
+	endbranch
 	cmp	\$1,$len
 	jne	.Lctr32_bulk
@@ -1777,6 +1783,7 @@ $code.=<<___;
 .align	16
 aesni_xts_encrypt:
 .cfi_startproc
+	endbranch
 	lea	(%rsp),%r11		# frame pointer
 .cfi_def_cfa_register	%r11
 	push	%rbp
@@ -2260,6 +2267,7 @@ $code.=<<___;
 .align	16
 aesni_xts_decrypt:
 .cfi_startproc
+	endbranch
 	lea	(%rsp),%r11		# frame pointer
 .cfi_def_cfa_register	%r11
 	push	%rbp
@@ -2785,6 +2793,7 @@ $code.=<<___;
 .align	32
 aesni_ocb_encrypt:
 .cfi_startproc
+	endbranch
 	lea	(%rsp),%rax
 	push	%rbx
 .cfi_push	%rbx
@@ -3251,6 +3260,7 @@ __ocb_encrypt1:
 .align	32
 aesni_ocb_decrypt:
 .cfi_startproc
+	endbranch
 	lea	(%rsp),%rax
 	push	%rbx
 .cfi_push	%rbx
@@ -3739,6 +3749,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_cbc_encrypt:
 .cfi_startproc
+	endbranch
 	test	$len,$len		# check length
 	jz	.Lcbc_ret
diff --git a/crypto/aes/asm/bsaes-x86_64.pl b/crypto/aes/asm/bsaes-x86_64.pl
index b75d95ffab..2e46802dfe 100644
--- a/crypto/aes/asm/bsaes-x86_64.pl
+++ b/crypto/aes/asm/bsaes-x86_64.pl
@@ -1616,6 +1616,7 @@ $code.=<<___;
 .align	16
 bsaes_cbc_encrypt:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	mov	48(%rsp),$arg6		# pull direction flag
@@ -1921,6 +1922,7 @@ $code.=<<___;
 .align	16
 bsaes_ctr32_encrypt_blocks:
 .cfi_startproc
+	endbranch
 	mov	%rsp, %rax
 .Lctr_enc_prologue:
 	push	%rbp
diff --git a/crypto/aes/asm/vpaes-x86_64.pl b/crypto/aes/asm/vpaes-x86_64.pl
index 5c1dc9f9ea..121370658c 100644
--- a/crypto/aes/asm/vpaes-x86_64.pl
+++ b/crypto/aes/asm/vpaes-x86_64.pl
@@ -698,6 +698,7 @@ _vpaes_schedule_mangle:
 .align	16
 ${PREFIX}_set_encrypt_key:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0xb8(%rsp),%rsp
@@ -748,6 +749,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_set_decrypt_key:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0xb8(%rsp),%rsp
@@ -803,6 +805,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_encrypt:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0xb8(%rsp),%rsp
@@ -848,6 +851,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_decrypt:
 .cfi_startproc
+	endbranch
 ___
 $code.=<<___ if ($win64);
 	lea	-0xb8(%rsp),%rsp
@@ -899,6 +903,7 @@ $code.=<<___;
 .align	16
 ${PREFIX}_cbc_encrypt:
 .cfi_startproc
+	endbranch
 	xchg	$key,$len
 ___
 ($len,$key)=($key,$len);