-rw-r--r--  Documentation/crypto/api-intro.txt  41
-rw-r--r--  arch/s390/crypto/aes_s390.c  227
-rw-r--r--  arch/x86/crypto/Makefile  12
-rw-r--r--  arch/x86/crypto/aes-i586-asm_32.S  89
-rw-r--r--  arch/x86/crypto/aes-x86_64-asm_64.S  68
-rw-r--r--  arch/x86/crypto/aes_32.c  515
-rw-r--r--  arch/x86/crypto/aes_64.c  336
-rw-r--r--  arch/x86/crypto/aes_glue.c  57
-rw-r--r--  arch/x86/crypto/salsa20-i586-asm_32.S  1114
-rw-r--r--  arch/x86/crypto/salsa20-x86_64-asm_64.S  920
-rw-r--r--  arch/x86/crypto/salsa20_glue.c  129
-rw-r--r--  arch/x86/crypto/twofish_64.c  97
-rw-r--r--  arch/x86/crypto/twofish_glue.c (renamed from arch/x86/crypto/twofish_32.c)  8
-rw-r--r--  crypto/Kconfig  97
-rw-r--r--  crypto/Makefile  14
-rw-r--r--  crypto/ablkcipher.c  241
-rw-r--r--  crypto/aead.c  400
-rw-r--r--  crypto/aes_generic.c  468
-rw-r--r--  crypto/algapi.c  65
-rw-r--r--  crypto/api.c  19
-rw-r--r--  crypto/authenc.c  334
-rw-r--r--  crypto/blkcipher.c  202
-rw-r--r--  crypto/camellia.c  1781
-rw-r--r--  crypto/cast6.c  6
-rw-r--r--  crypto/cbc.c  109
-rw-r--r--  crypto/ccm.c  889
-rw-r--r--  crypto/chainiv.c  331
-rw-r--r--  crypto/cryptd.c  6
-rw-r--r--  crypto/crypto_null.c  70
-rw-r--r--  crypto/ctr.c  422
-rw-r--r--  crypto/des_generic.c  17
-rw-r--r--  crypto/digest.c  4
-rw-r--r--  crypto/eseqiv.c  264
-rw-r--r--  crypto/gcm.c  823
-rw-r--r--  crypto/hmac.c  3
-rw-r--r--  crypto/internal.h  31
-rw-r--r--  crypto/lzo.c  106
-rw-r--r--  crypto/pcbc.c  105
-rw-r--r--  crypto/salsa20_generic.c  255
-rw-r--r--  crypto/scatterwalk.c  10
-rw-r--r--  crypto/seqiv.c  345
-rw-r--r--  crypto/sha256_generic.c  72
-rw-r--r--  crypto/tcrypt.c  449
-rw-r--r--  crypto/tcrypt.h  3415
-rw-r--r--  crypto/twofish_common.c  96
-rw-r--r--  crypto/xcbc.c  10
-rw-r--r--  drivers/char/hw_random/amd-rng.c  12
-rw-r--r--  drivers/char/hw_random/core.c  24
-rw-r--r--  drivers/char/hw_random/geode-rng.c  12
-rw-r--r--  drivers/char/hw_random/intel-rng.c  15
-rw-r--r--  drivers/char/hw_random/omap-rng.c  13
-rw-r--r--  drivers/char/hw_random/pasemi-rng.c  16
-rw-r--r--  drivers/char/hw_random/via-rng.c  19
-rw-r--r--  drivers/crypto/Kconfig  11
-rw-r--r--  drivers/crypto/Makefile  1
-rw-r--r--  drivers/crypto/geode-aes.c  298
-rw-r--r--  drivers/crypto/geode-aes.h  44
-rw-r--r--  drivers/crypto/hifn_795x.c  2838
-rw-r--r--  drivers/crypto/padlock-aes.c  24
-rw-r--r--  include/crypto/aead.h  105
-rw-r--r--  include/crypto/aes.h  31
-rw-r--r--  include/crypto/algapi.h  31
-rw-r--r--  include/crypto/authenc.h  27
-rw-r--r--  include/crypto/ctr.h  20
-rw-r--r--  include/crypto/des.h  19
-rw-r--r--  include/crypto/internal/aead.h  80
-rw-r--r--  include/crypto/internal/skcipher.h  110
-rw-r--r--  include/crypto/scatterwalk.h (renamed from crypto/scatterwalk.h)  45
-rw-r--r--  include/crypto/sha.h  12
-rw-r--r--  include/crypto/skcipher.h  110
-rw-r--r--  include/linux/crypto.h  103
-rw-r--r--  include/linux/hw_random.h  2
72 files changed, 15840 insertions, 3254 deletions
diff --git a/Documentation/crypto/api-intro.txt b/Documentation/crypto/api-intro.txt
index a2ac6d294793..8b49302712a8 100644
--- a/Documentation/crypto/api-intro.txt
+++ b/Documentation/crypto/api-intro.txt
@@ -33,9 +33,16 @@ The idea is to make the user interface and algorithm registration API
very simple, while hiding the core logic from both. Many good ideas
from existing APIs such as Cryptoapi and Nettle have been adapted for this.
-The API currently supports three types of transforms: Ciphers, Digests and
-Compressors. The compression algorithms especially seem to be performing
-very well so far.
+The API currently supports five main types of transforms: AEAD (Authenticated
+Encryption with Associated Data), Block Ciphers, Ciphers, Compressors and
+Hashes.
+
+Please note that Block Ciphers is somewhat of a misnomer. It is in fact
+meant to support all ciphers including stream ciphers. The difference
+between Block Ciphers and Ciphers is that the latter operates on exactly
+one block while the former can operate on an arbitrary amount of data,
+subject to block size requirements (i.e., non-stream ciphers can only
+process multiples of blocks).
Support for hardware crypto devices via an asynchronous interface is
under development.
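
A rough illustration of the Cipher vs. Block Cipher split documented in the
hunk above -- a minimal sketch against the 2.6.24-era synchronous API, not
part of the patch; toy_encrypt(), the in-place buffer use and the
"aes"/"cbc(aes)" algorithm names are assumptions made for the example:

	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	/* Hypothetical helper: AES-encrypt nblocks 16-byte blocks in place.
	 * buf must be addressable via a scatterlist (i.e. not on the stack).
	 */
	static int toy_encrypt(u8 *key, u8 *iv, u8 *buf, unsigned int nblocks)
	{
		struct crypto_cipher *cip;
		struct crypto_blkcipher *blk;
		struct blkcipher_desc desc;
		struct scatterlist sg;
		int err;

		/* "Cipher": exactly one block per call, no IV, no walking. */
		cip = crypto_alloc_cipher("aes", 0, 0);
		if (IS_ERR(cip))
			return PTR_ERR(cip);
		err = crypto_cipher_setkey(cip, key, 16);
		if (!err)
			crypto_cipher_encrypt_one(cip, buf, buf);
		crypto_free_cipher(cip);
		if (err)
			return err;

		/* "Block Cipher": an arbitrary amount of data via
		 * scatterlists (multiples of the block size for non-stream
		 * ciphers); the chaining mode walks the data and owns the
		 * IV. */
		blk = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
		if (IS_ERR(blk))
			return PTR_ERR(blk);
		err = crypto_blkcipher_setkey(blk, key, 16);
		if (!err) {
			crypto_blkcipher_set_iv(blk, iv, 16);
			desc.tfm = blk;
			desc.flags = 0;
			sg_init_one(&sg, buf, nblocks * 16);
			err = crypto_blkcipher_encrypt(&desc, &sg, &sg,
						       nblocks * 16);
		}
		crypto_free_blkcipher(blk);
		return err;
	}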
@@ -69,29 +76,12 @@ Here's an example of how to use the API:
Many real examples are available in the regression test module (tcrypt.c).
-CONFIGURATION NOTES
-
-As Triple DES is part of the DES module, for those using modular builds,
-add the following line to /etc/modprobe.conf:
-
- alias des3_ede des
-
-The Null algorithms reside in the crypto_null module, so these lines
-should also be added:
-
- alias cipher_null crypto_null
- alias digest_null crypto_null
- alias compress_null crypto_null
-
-The SHA384 algorithm shares code within the SHA512 module, so you'll
-also need:
- alias sha384 sha512
-
-
DEVELOPER NOTES
Transforms may only be allocated in user context, and cryptographic
-methods may only be called from softirq and user contexts.
+methods may only be called from softirq and user contexts. For
+transforms with a setkey method, the setkey method itself may likewise
+only be called from user context.
When using the API for ciphers, performance will be optimal if each
scatterlist contains data which is a multiple of the cipher's block
@@ -130,8 +120,9 @@ might already be working on.
BUGS
Send bug reports to:
-Herbert Xu <herbert@gondor.apana.org.au>
-Cc: David S. Miller <davem@redhat.com>
+linux-crypto@vger.kernel.org
+Cc: Herbert Xu <herbert@gondor.apana.org.au>,
+ David S. Miller <davem@redhat.com>
FURTHER INFORMATION
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 512669691ad0..46c97058ebe1 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -6,6 +6,7 @@
* s390 Version:
* Copyright IBM Corp. 2005,2007
* Author(s): Jan Glauber (jang@de.ibm.com)
+ * Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
*
* Derived from "crypto/aes_generic.c"
*
@@ -16,17 +17,13 @@
*
*/
+#include <crypto/aes.h>
#include <crypto/algapi.h>
+#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"
-#define AES_MIN_KEY_SIZE 16
-#define AES_MAX_KEY_SIZE 32
-
-/* data block size for all key lengths */
-#define AES_BLOCK_SIZE 16
-
#define AES_KEYLEN_128 1
#define AES_KEYLEN_192 2
#define AES_KEYLEN_256 4
@@ -39,45 +36,89 @@ struct s390_aes_ctx {
long enc;
long dec;
int key_len;
+ union {
+ struct crypto_blkcipher *blk;
+ struct crypto_cipher *cip;
+ } fallback;
};
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
- unsigned int key_len)
+/*
+ * Check if the key_len is supported by the HW.
+ * Returns 0 if it is, a positive number if it is not and a software
+ * fallback is required, or a negative number if the key size is invalid.
+ */
+static int need_fallback(unsigned int key_len)
{
- struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
- u32 *flags = &tfm->crt_flags;
-
switch (key_len) {
case 16:
if (!(keylen_flag & AES_KEYLEN_128))
- goto fail;
+ return 1;
break;
case 24:
if (!(keylen_flag & AES_KEYLEN_192))
- goto fail;
-
+ return 1;
break;
case 32:
if (!(keylen_flag & AES_KEYLEN_256))
- goto fail;
+ return 1;
break;
default:
- goto fail;
+ return -1;
break;
}
+ return 0;
+}
+
+static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
+ unsigned int key_len)
+{
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+ sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
+ CRYPTO_TFM_REQ_MASK);
+
+ ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
+ if (ret) {
+ tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+ tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
+ CRYPTO_TFM_RES_MASK);
+ }
+ return ret;
+}
+
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+ unsigned int key_len)
+{
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ u32 *flags = &tfm->crt_flags;
+ int ret;
+
+ ret = need_fallback(key_len);
+ if (ret < 0) {
+ *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ return -EINVAL;
+ }
sctx->key_len = key_len;
- memcpy(sctx->key, in_key, key_len);
- return 0;
-fail:
- *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
- return -EINVAL;
+ if (!ret) {
+ memcpy(sctx->key, in_key, key_len);
+ return 0;
+ }
+
+ return setkey_fallback_cip(tfm, in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ if (unlikely(need_fallback(sctx->key_len))) {
+ crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
+ return;
+ }
+
switch (sctx->key_len) {
case 16:
crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
@@ -98,6 +139,11 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ if (unlikely(need_fallback(sctx->key_len))) {
+ crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
+ return;
+ }
+
switch (sctx->key_len) {
case 16:
crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
@@ -114,6 +160,29 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
}
}
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+ const char *name = tfm->__crt_alg->cra_name;
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+ sctx->fallback.cip = crypto_alloc_cipher(name, 0,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+ if (IS_ERR(sctx->fallback.cip)) {
+ printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+ return PTR_ERR(sctx->fallback.cip);
+ }
+
+ return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_cipher(sctx->fallback.cip);
+ sctx->fallback.cip = NULL;
+}
static struct crypto_alg aes_alg = {
.cra_name = "aes",
@@ -125,6 +194,8 @@ static struct crypto_alg aes_alg = {
.cra_ctxsize = sizeof(struct s390_aes_ctx),
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
+ .cra_init = fallback_init_cip,
+ .cra_exit = fallback_exit_cip,
.cra_u = {
.cipher = {
.cia_min_keysize = AES_MIN_KEY_SIZE,
@@ -136,10 +207,70 @@ static struct crypto_alg aes_alg = {
}
};
+static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
+ unsigned int len)
+{
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+ sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+ CRYPTO_TFM_REQ_MASK);
+
+ ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
+ if (ret) {
+ tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+ tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+ CRYPTO_TFM_RES_MASK);
+ }
+ return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+ struct scatterlist *dst, struct scatterlist *src,
+ unsigned int nbytes)
+{
+ int ret;
+ struct crypto_blkcipher *tfm;
+ struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+ tfm = desc->tfm;
+ desc->tfm = sctx->fallback.blk;
+
+ ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+ desc->tfm = tfm;
+ return ret;
+}
+
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+ struct scatterlist *dst, struct scatterlist *src,
+ unsigned int nbytes)
+{
+ int ret;
+ struct crypto_blkcipher *tfm;
+ struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+ tfm = desc->tfm;
+ desc->tfm = sctx->fallback.blk;
+
+ ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+ desc->tfm = tfm;
+ return ret;
+}
+
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = need_fallback(key_len);
+ if (ret > 0) {
+ sctx->key_len = key_len;
+ return setkey_fallback_blk(tfm, in_key, key_len);
+ }
switch (key_len) {
case 16:
@@ -188,6 +319,9 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (unlikely(need_fallback(sctx->key_len)))
+ return fallback_blk_enc(desc, dst, src, nbytes);
+
blkcipher_walk_init(&walk, dst, src, nbytes);
return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}
@@ -199,10 +333,37 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (unlikely(need_fallback(sctx->key_len)))
+ return fallback_blk_dec(desc, dst, src, nbytes);
+
blkcipher_walk_init(&walk, dst, src, nbytes);
return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+ const char *name = tfm->__crt_alg->cra_name;
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+ sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
+ CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+ if (IS_ERR(sctx->fallback.blk)) {
+ printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+ return PTR_ERR(sctx->fallback.blk);
+ }
+
+ return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+ struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+ crypto_free_blkcipher(sctx->fallback.blk);
+ sctx->fallback.blk = NULL;
+}
+
static struct crypto_alg ecb_aes_alg = {
.cra_name = "ecb(aes)",
.cra_driver_name = "ecb-aes-s390",
@@ -214,6 +375,8 @@ static struct crypto_alg ecb_aes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
+ .cra_init = fallback_init_blk,
+ .cra_exit = fallback_exit_blk,
.cra_u = {
.blkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
@@ -229,6 +392,13 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len)
{
struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+ int ret;
+
+ ret = need_fallback(key_len);
+ if (ret > 0) {
+ sctx->key_len = key_len;
+ return setkey_fallback_blk(tfm, in_key, key_len);
+ }
switch (key_len) {
case 16:
@@ -283,6 +453,9 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (unlikely(need_fallback(sctx->key_len)))
+ return fallback_blk_enc(desc, dst, src, nbytes);
+
blkcipher_walk_init(&walk, dst, src, nbytes);
return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}
@@ -294,6 +467,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
struct blkcipher_walk walk;
+ if (unlikely(need_fallback(sctx->key_len)))
+ return fallback_blk_dec(desc, dst, src, nbytes);
+
blkcipher_walk_init(&walk, dst, src, nbytes);
return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}
@@ -309,6 +485,8 @@ static struct crypto_alg cbc_aes_alg = {
.cra_type = &crypto_blkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
+ .cra_init = fallback_init_blk,
+ .cra_exit = fallback_exit_blk,
.cra_u = {
.blkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
@@ -336,14 +514,10 @@ static int __init aes_init(void)
return -EOPNOTSUPP;
/* z9 109 and z9 BC/EC only support 128 bit key length */
- if (keylen_flag == AES_KEYLEN_128) {
- aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
- ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
- cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
+ if (keylen_flag == AES_KEYLEN_128)
printk(KERN_INFO
"aes_s390: hardware acceleration only available for"
"128 bit keys\n");
- }
ret = crypto_register_alg(&aes_alg);
if (ret)
@@ -382,4 +556,3 @@ MODULE_ALIAS("aes");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
-
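
The fallback plumbing added above follows the usual type/mask convention of
the crypto_alloc_*() calls: with type 0 and a mask of CRYPTO_ALG_ASYNC |
CRYPTO_ALG_NEED_FALLBACK, the lookup may only return a synchronous algorithm
that does not itself need a fallback -- in practice the generic C
implementation. A condensed fragment showing how the tri-state
need_fallback() result is consumed, mirroring ecb_aes_set_key() above (this
is illustrative, not literal patch code):

	ret = need_fallback(key_len);
	if (ret < 0)		/* key size invalid for any implementation */
		return -EINVAL;
	if (ret > 0)		/* valid, but this CPU's CPACF lacks it */
		return setkey_fallback_blk(tfm, in_key, key_len);
	/* ret == 0: the hardware supports it; program the key directly */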
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile
index 46bb609e2444..3874c2de5403 100644
--- a/arch/x86/crypto/Makefile
+++ b/arch/x86/crypto/Makefile
@@ -4,12 +4,16 @@
obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o
obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
+obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
+obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
-aes-i586-y := aes-i586-asm_32.o aes_32.o
-twofish-i586-y := twofish-i586-asm_32.o twofish_32.o
+aes-i586-y := aes-i586-asm_32.o aes_glue.o
+twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o
+salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
-aes-x86_64-y := aes-x86_64-asm_64.o aes_64.o
-twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_64.o
+aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o
+twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
+salsa20-x86_64-y := salsa20-x86_64-asm_64.o salsa20_glue.o
diff --git a/arch/x86/crypto/aes-i586-asm_32.S b/arch/x86/crypto/aes-i586-asm_32.S
index f942f0c8f630..1093bede3e0a 100644
--- a/arch/x86/crypto/aes-i586-asm_32.S
+++ b/arch/x86/crypto/aes-i586-asm_32.S
@@ -46,9 +46,9 @@
#define in_blk 16
/* offsets in crypto_tfm structure */
-#define ekey (crypto_tfm_ctx_offset + 0)
-#define nrnd (crypto_tfm_ctx_offset + 256)
-#define dkey (crypto_tfm_ctx_offset + 260)
+#define klen (crypto_tfm_ctx_offset + 0)
+#define ekey (crypto_tfm_ctx_offset + 4)
+#define dkey (crypto_tfm_ctx_offset + 244)
// register mapping for encrypt and decrypt subroutines
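
For reference, the new klen/ekey/dkey offsets match the generic AES context
that this series moves into <crypto/aes.h>; assuming the layout sketched
below (60 u32s = 240 bytes hold the 15 round keys of AES-256), dkey lands at
crypto_tfm_ctx_offset + 4 + 240 = 244. Note that klen now holds the key size
in bytes (16/24/32) rather than a round count, hence the cmp $24,%r3 tests
in the hunks below:

	/* sketch of the assumed context layout */
	struct crypto_aes_ctx {
		u32 key_length;		/* klen: ctx offset 0   */
		u32 key_enc[60];	/* ekey: ctx offset 4   */
		u32 key_dec[60];	/* dkey: ctx offset 244 */
	};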
@@ -221,8 +221,8 @@
.global aes_enc_blk
-.extern ft_tab
-.extern fl_tab
+.extern crypto_ft_tab
+.extern crypto_fl_tab
.align 4
@@ -236,7 +236,7 @@ aes_enc_blk:
1: push %ebx
mov in_blk+4(%esp),%r2
push %esi
- mov nrnd(%ebp),%r3 // number of rounds
+ mov klen(%ebp),%r3 // key size
push %edi
#if ekey != 0
lea ekey(%ebp),%ebp // key pointer
@@ -255,26 +255,26 @@ aes_enc_blk:
sub $8,%esp // space for register saves on stack
add $16,%ebp // increment to next round key
- cmp $12,%r3
+ cmp $24,%r3
jb 4f // 10 rounds for 128-bit key
lea 32(%ebp),%ebp
je 3f // 12 rounds for 192-bit key
lea 32(%ebp),%ebp
-2: fwd_rnd1( -64(%ebp) ,ft_tab) // 14 rounds for 256-bit key
- fwd_rnd2( -48(%ebp) ,ft_tab)
-3: fwd_rnd1( -32(%ebp) ,ft_tab) // 12 rounds for 192-bit key
- fwd_rnd2( -16(%ebp) ,ft_tab)
-4: fwd_rnd1( (%ebp) ,ft_tab) // 10 rounds for 128-bit key
- fwd_rnd2( +16(%ebp) ,ft_tab)
- fwd_rnd1( +32(%ebp) ,ft_tab)
- fwd_rnd2( +48(%ebp) ,ft_tab)
- fwd_rnd1( +64(%ebp) ,ft_tab)
- fwd_rnd2( +80(%ebp) ,ft_tab)
- fwd_rnd1( +96(%ebp) ,ft_tab)
- fwd_rnd2(+112(%ebp) ,ft_tab)
- fwd_rnd1(+128(%ebp) ,ft_tab)
- fwd_rnd2(+144(%ebp) ,fl_tab) // last round uses a different table
+2: fwd_rnd1( -64(%ebp), crypto_ft_tab) // 14 rounds for 256-bit key
+ fwd_rnd2( -48(%ebp), crypto_ft_tab)
+3: fwd_rnd1( -32(%ebp), crypto_ft_tab) // 12 rounds for 192-bit key
+ fwd_rnd2( -16(%ebp), crypto_ft_tab)
+4: fwd_rnd1( (%ebp), crypto_ft_tab) // 10 rounds for 128-bit key
+ fwd_rnd2( +16(%ebp), crypto_ft_tab)
+ fwd_rnd1( +32(%ebp), crypto_ft_tab)
+ fwd_rnd2( +48(%ebp), crypto_ft_tab)
+ fwd_rnd1( +64(%ebp), crypto_ft_tab)
+ fwd_rnd2( +80(%ebp), crypto_ft_tab)
+ fwd_rnd1( +96(%ebp), crypto_ft_tab)
+ fwd_rnd2(+112(%ebp), crypto_ft_tab)
+ fwd_rnd1(+128(%ebp), crypto_ft_tab)
+ fwd_rnd2(+144(%ebp), crypto_fl_tab) // last round uses a different table
// move final values to the output array. CAUTION: the
// order of these assigns rely on the register mappings
@@ -297,8 +297,8 @@ aes_enc_blk:
.global aes_dec_blk
-.extern it_tab
-.extern il_tab
+.extern crypto_it_tab
+.extern crypto_il_tab
.align 4
@@ -312,14 +312,11 @@ aes_dec_blk:
1: push %ebx
mov in_blk+4(%esp),%r2
push %esi
- mov nrnd(%ebp),%r3 // number of rounds
+ mov klen(%ebp),%r3 // key size
push %edi
#if dkey != 0
lea dkey(%ebp),%ebp // key pointer
#endif
- mov %r3,%r0
- shl $4,%r0
- add %r0,%ebp
// input four columns and xor in first round key
@@ -333,27 +330,27 @@ aes_dec_blk:
xor 12(%ebp),%r5
sub $8,%esp // space for register saves on stack
- sub $16,%ebp // increment to next round key
- cmp $12,%r3
+ add $16,%ebp // increment to next round key
+ cmp $24,%r3
jb 4f // 10 rounds for 128-bit key
- lea -32(%ebp),%ebp
+ lea 32(%ebp),%ebp
je 3f // 12 rounds for 192-bit key
- lea -32(%ebp),%ebp
-
-2: inv_rnd1( +64(%ebp), it_tab) // 14 rounds for 256-bit key
- inv_rnd2( +48(%ebp), it_tab)
-3: inv_rnd1( +32(%ebp), it_tab) // 12 rounds for 192-bit key
- inv_rnd2( +16(%ebp), it_tab)
-4: inv_rnd1( (%ebp), it_tab) // 10 rounds for 128-bit key