author     Linus Torvalds <torvalds@linux-foundation.org>  2012-10-04 09:06:34 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2012-10-04 09:06:34 -0700
commit     d66e6737d454553e1e62109d8298ede5351178a4 (patch)
tree       c28b205045935b111527f461d2b114daa26e4fb8 /crypto
parent     612a9aab56a93533e76e3ad91642db7033e03b69 (diff)
parent     c9f97a27ceee84998999bf3341e6d5d207b05539 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:

 - Optimised AES/SHA1 for ARM.
 - IPsec ESN support in talitos and caam.
 - x86_64/avx implementation of cast5/cast6.
 - Add/use multi-algorithm registration helpers where possible.
 - Added IBM Power7+ in-Nest support.
 - Misc fixes.

Fix up trivial conflicts in crypto/Kconfig due to the sparc64 crypto
config options being added next to the new ARM ones.

[ Side note: cut-and-paste duplicate help texts make those conflicts
  harder to read than necessary, thanks to git being smart about
  minimizing conflicts and maximizing the common parts... ]

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (71 commits)
  crypto: x86/glue_helper - fix storing of new IV in CBC encryption
  crypto: cast5/avx - fix storing of new IV in CBC encryption
  crypto: tcrypt - add missing tests for camellia and ghash
  crypto: testmgr - make test_aead also test 'dst != src' code paths
  crypto: testmgr - make test_skcipher also test 'dst != src' code paths
  crypto: testmgr - add test vectors for CTR mode IV increasement
  crypto: testmgr - add test vectors for partial ctr(cast5) and ctr(cast6)
  crypto: testmgr - allow non-multi page and multi page skcipher tests from same test template
  crypto: caam - increase TRNG clocks per sample
  crypto, tcrypt: remove local_bh_disable/enable() around local_irq_disable/enable()
  crypto: tegra-aes - fix error return code
  crypto: crypto4xx - fix error return code
  crypto: hifn_795x - fix error return code
  crypto: ux500 - fix error return code
  crypto: caam - fix error IDs for SEC v5.x RNG4
  hwrng: mxc-rnga - Access data via structure
  hwrng: mxc-rnga - Adapt clocks to new i.mx clock framework
  crypto: caam - add IPsec ESN support
  crypto: 842 - remove .cra_list initialization
  Revert "[CRYPTO] cast6: inline bloat--"
  ...
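The "multi-algorithm registration helpers" mentioned above are crypto_register_algs() and crypto_unregister_algs(), which register a whole array of struct crypto_alg in one call instead of one algorithm at a time; the ansi_cprng conversion further down is one example. Below is a minimal sketch of the pattern - the example_* names and the elided cra_* fields are placeholders, not code from this merge:

    #include <linux/module.h>
    #include <linux/crypto.h>

    /*
     * Sketch of the crypto_register_algs() pattern. The two entries are
     * placeholders; a real driver fills in the blocksize, ctxsize and
     * cipher/compress callbacks for each algorithm.
     */
    static struct crypto_alg example_algs[] = { {
        .cra_name   = "example-alg-one",
        .cra_module = THIS_MODULE,
        /* remaining cra_* fields elided */
    }, {
        .cra_name   = "example-alg-two",
        .cra_module = THIS_MODULE,
        /* remaining cra_* fields elided */
    } };

    static int __init example_mod_init(void)
    {
        /* registers every entry, unwinding the earlier ones on failure */
        return crypto_register_algs(example_algs, ARRAY_SIZE(example_algs));
    }

    static void __exit example_mod_exit(void)
    {
        crypto_unregister_algs(example_algs, ARRAY_SIZE(example_algs));
    }

    module_init(example_mod_init);
    module_exit(example_mod_exit);
    MODULE_LICENSE("GPL");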
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/842.c | 182
-rw-r--r--  crypto/Kconfig | 75
-rw-r--r--  crypto/Makefile | 5
-rw-r--r--  crypto/aes_generic.c | 1
-rw-r--r--  crypto/ansi_cprng.c | 63
-rw-r--r--  crypto/anubis.c | 1
-rw-r--r--  crypto/blowfish_generic.c | 1
-rw-r--r--  crypto/camellia_generic.c | 1
-rw-r--r--  crypto/cast5_generic.c (renamed from crypto/cast5.c) | 80
-rw-r--r--  crypto/cast6_generic.c (renamed from crypto/cast6.c) | 73
-rw-r--r--  crypto/crypto_null.c | 57
-rw-r--r--  crypto/crypto_user.c | 2
-rw-r--r--  crypto/deflate.c | 1
-rw-r--r--  crypto/des_generic.c | 25
-rw-r--r--  crypto/fcrypt.c | 1
-rw-r--r--  crypto/ghash-generic.c | 1
-rw-r--r--  crypto/khazad.c | 1
-rw-r--r--  crypto/krng.c | 1
-rw-r--r--  crypto/lzo.c | 1
-rw-r--r--  crypto/salsa20_generic.c | 1
-rw-r--r--  crypto/seed.c | 1
-rw-r--r--  crypto/serpent_generic.c | 53
-rw-r--r--  crypto/sha256_generic.c | 25
-rw-r--r--  crypto/sha512_generic.c | 20
-rw-r--r--  crypto/shash.c | 36
-rw-r--r--  crypto/tcrypt.c | 95
-rw-r--r--  crypto/tcrypt.h | 1
-rw-r--r--  crypto/tea.c | 41
-rw-r--r--  crypto/testmgr.c | 472
-rw-r--r--  crypto/testmgr.h | 3525
-rw-r--r--  crypto/tgr192.c | 38
-rw-r--r--  crypto/twofish_generic.c | 1
-rw-r--r--  crypto/vmac.c | 10
-rw-r--r--  crypto/wp512.c | 39
34 files changed, 4488 insertions(+), 442 deletions(-)
diff --git a/crypto/842.c b/crypto/842.c
new file mode 100644
index 000000000000..65c7a89cfa09
--- /dev/null
+++ b/crypto/842.c
@@ -0,0 +1,182 @@
+/*
+ * Cryptographic API for the 842 compression algorithm.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ *
+ * Copyright (C) IBM Corporation, 2011
+ *
+ * Authors: Robert Jennings <rcj@linux.vnet.ibm.com>
+ * Seth Jennings <sjenning@linux.vnet.ibm.com>
+ */
+
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/vmalloc.h>
+#include <linux/nx842.h>
+#include <linux/lzo.h>
+#include <linux/timer.h>
+
+static int nx842_uselzo;
+
+struct nx842_ctx {
+ void *nx842_wmem; /* working memory for 842/lzo */
+};
+
+enum nx842_crypto_type {
+ NX842_CRYPTO_TYPE_842,
+ NX842_CRYPTO_TYPE_LZO
+};
+
+#define NX842_SENTINEL 0xdeadbeef
+
+struct nx842_crypto_header {
+ unsigned int sentinel; /* debug */
+ enum nx842_crypto_type type;
+};
+
+static int nx842_init(struct crypto_tfm *tfm)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ int wmemsize;
+
+ wmemsize = max_t(int, nx842_get_workmem_size(), LZO1X_MEM_COMPRESS);
+ ctx->nx842_wmem = kmalloc(wmemsize, GFP_NOFS);
+ if (!ctx->nx842_wmem)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void nx842_exit(struct crypto_tfm *tfm)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ kfree(ctx->nx842_wmem);
+}
+
+static void nx842_reset_uselzo(unsigned long data)
+{
+ nx842_uselzo = 0;
+}
+
+static DEFINE_TIMER(failover_timer, nx842_reset_uselzo, 0, 0);
+
+static int nx842_crypto_compress(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct nx842_crypto_header *hdr;
+ unsigned int tmp_len = *dlen;
+ size_t lzodlen; /* needed for lzo */
+ int err;
+
+ *dlen = 0;
+ hdr = (struct nx842_crypto_header *)dst;
+ hdr->sentinel = NX842_SENTINEL; /* debug */
+ dst += sizeof(struct nx842_crypto_header);
+ tmp_len -= sizeof(struct nx842_crypto_header);
+ lzodlen = tmp_len;
+
+ if (likely(!nx842_uselzo)) {
+ err = nx842_compress(src, slen, dst, &tmp_len, ctx->nx842_wmem);
+
+ if (likely(!err)) {
+ hdr->type = NX842_CRYPTO_TYPE_842;
+ *dlen = tmp_len + sizeof(struct nx842_crypto_header);
+ return 0;
+ }
+
+ /* hardware failed */
+ nx842_uselzo = 1;
+
+ /* set timer to check for hardware again in 1 second */
+ mod_timer(&failover_timer, jiffies + msecs_to_jiffies(1000));
+ }
+
+ /* no hardware, use lzo */
+ err = lzo1x_1_compress(src, slen, dst, &lzodlen, ctx->nx842_wmem);
+ if (err != LZO_E_OK)
+ return -EINVAL;
+
+ hdr->type = NX842_CRYPTO_TYPE_LZO;
+ *dlen = lzodlen + sizeof(struct nx842_crypto_header);
+ return 0;
+}
+
+static int nx842_crypto_decompress(struct crypto_tfm *tfm, const u8 *src,
+ unsigned int slen, u8 *dst, unsigned int *dlen)
+{
+ struct nx842_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct nx842_crypto_header *hdr;
+ unsigned int tmp_len = *dlen;
+ size_t lzodlen; /* needed for lzo */
+ int err;
+
+ *dlen = 0;
+ hdr = (struct nx842_crypto_header *)src;
+
+ if (unlikely(hdr->sentinel != NX842_SENTINEL))
+ return -EINVAL;
+
+ src += sizeof(struct nx842_crypto_header);
+ slen -= sizeof(struct nx842_crypto_header);
+
+ if (likely(hdr->type == NX842_CRYPTO_TYPE_842)) {
+ err = nx842_decompress(src, slen, dst, &tmp_len,
+ ctx->nx842_wmem);
+ if (err)
+ return -EINVAL;
+ *dlen = tmp_len;
+ } else if (hdr->type == NX842_CRYPTO_TYPE_LZO) {
+ lzodlen = tmp_len;
+ err = lzo1x_decompress_safe(src, slen, dst, &lzodlen);
+ if (err != LZO_E_OK)
+ return -EINVAL;
+ *dlen = lzodlen;
+ } else
+ return -EINVAL;
+
+ return 0;
+}
+
+static struct crypto_alg alg = {
+ .cra_name = "842",
+ .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
+ .cra_ctxsize = sizeof(struct nx842_ctx),
+ .cra_module = THIS_MODULE,
+ .cra_init = nx842_init,
+ .cra_exit = nx842_exit,
+ .cra_u = { .compress = {
+ .coa_compress = nx842_crypto_compress,
+ .coa_decompress = nx842_crypto_decompress } }
+};
+
+static int __init nx842_mod_init(void)
+{
+ del_timer(&failover_timer);
+ return crypto_register_alg(&alg);
+}
+
+static void __exit nx842_mod_exit(void)
+{
+ crypto_unregister_alg(&alg);
+}
+
+module_init(nx842_mod_init);
+module_exit(nx842_mod_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("842 Compression Algorithm");
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 957cc56ce4b9..50402dc0ea35 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -460,6 +460,15 @@ config CRYPTO_SHA1_SPARC64
SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
using sparc64 crypto instructions, when available.
+config CRYPTO_SHA1_ARM
+ tristate "SHA1 digest algorithm (ARM-asm)"
+ depends on ARM
+ select CRYPTO_SHA1
+ select CRYPTO_HASH
+ help
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
+ using optimized ARM assembler.
+
config CRYPTO_SHA256
tristate "SHA224 and SHA256 digest algorithm"
select CRYPTO_HASH
@@ -609,6 +618,8 @@ config CRYPTO_AES_NI_INTEL
select CRYPTO_CRYPTD
select CRYPTO_ABLK_HELPER_X86
select CRYPTO_ALGAPI
+ select CRYPTO_LRW
+ select CRYPTO_XTS
help
Use Intel AES-NI instructions for AES algorithm.
@@ -661,6 +672,30 @@ config CRYPTO_AES_SPARC64
for some popular block cipher mode is supported too, including
ECB and CBC.
+config CRYPTO_AES_ARM
+ tristate "AES cipher algorithms (ARM-asm)"
+ depends on ARM
+ select CRYPTO_ALGAPI
+ select CRYPTO_AES
+ help
+ Use optimized AES assembler routines for ARM platforms.
+
+ AES cipher algorithms (FIPS-197). AES uses the Rijndael
+ algorithm.
+
+ Rijndael appears to be consistently a very good performer in
+ both hardware and software across a wide range of computing
+ environments regardless of its use in feedback or non-feedback
+ modes. Its key setup time is excellent, and its key agility is
+ good. Rijndael's very low memory requirements make it very well
+ suited for restricted-space environments, in which it also
+ demonstrates excellent performance. Rijndael's operations are
+ among the easiest to defend against power and timing attacks.
+
+ The AES specifies three key sizes: 128, 192 and 256 bits
+
+ See <http://csrc.nist.gov/encryption/aes/> for more information.
+
config CRYPTO_ANUBIS
tristate "Anubis cipher algorithm"
select CRYPTO_ALGAPI
@@ -781,6 +816,20 @@ config CRYPTO_CAST5
The CAST5 encryption algorithm (synonymous with CAST-128) is
described in RFC2144.
+config CRYPTO_CAST5_AVX_X86_64
+ tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER_X86
+ select CRYPTO_CAST5
+ help
+ The CAST5 encryption algorithm (synonymous with CAST-128) is
+ described in RFC2144.
+
+ This module provides the Cast5 cipher algorithm that processes
+ sixteen blocks parallel using the AVX instruction set.
+
config CRYPTO_CAST6
tristate "CAST6 (CAST-256) cipher algorithm"
select CRYPTO_ALGAPI
@@ -788,6 +837,23 @@ config CRYPTO_CAST6
The CAST6 encryption algorithm (synonymous with CAST-256) is
described in RFC2612.
+config CRYPTO_CAST6_AVX_X86_64
+ tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_CRYPTD
+ select CRYPTO_ABLK_HELPER_X86
+ select CRYPTO_GLUE_HELPER_X86
+ select CRYPTO_CAST6
+ select CRYPTO_LRW
+ select CRYPTO_XTS
+ help
+ The CAST6 encryption algorithm (synonymous with CAST-256) is
+ described in RFC2612.
+
+ This module provides the Cast6 cipher algorithm that processes
+ eight blocks parallel using the AVX instruction set.
+
config CRYPTO_DES
tristate "DES and Triple DES EDE cipher algorithms"
select CRYPTO_ALGAPI
@@ -1106,6 +1172,15 @@ config CRYPTO_LZO
help
This is the LZO algorithm.
+config CRYPTO_842
+ tristate "842 compression algorithm"
+ depends on CRYPTO_DEV_NX_COMPRESS
+ # 842 uses lzo if the hardware becomes unavailable
+ select LZO_COMPRESS
+ select LZO_DECOMPRESS
+ help
+ This is the 842 algorithm.
+
comment "Random Number Generation"
config CRYPTO_ANSI_CPRNG
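The new options follow the existing Kconfig conventions, so using them is just a matter of selecting the symbols for the relevant architecture; a hypothetical set of .config fragments (module vs. built-in is the builder's choice, and CRYPTO_842 additionally depends on CRYPTO_DEV_NX_COMPRESS):

    # ARM build (hypothetical fragment)
    CONFIG_CRYPTO_SHA1_ARM=y
    CONFIG_CRYPTO_AES_ARM=y

    # x86_64 build (hypothetical fragment)
    CONFIG_CRYPTO_CAST5_AVX_X86_64=m
    CONFIG_CRYPTO_CAST6_AVX_X86_64=m

    # Power build with the NX coprocessor driver (hypothetical fragment)
    CONFIG_CRYPTO_842=m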
diff --git a/crypto/Makefile b/crypto/Makefile
index 30f33d675330..a301ad2b258c 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -68,8 +68,8 @@ obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o
obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
-obj-$(CONFIG_CRYPTO_CAST5) += cast5.o
-obj-$(CONFIG_CRYPTO_CAST6) += cast6.o
+obj-$(CONFIG_CRYPTO_CAST5) += cast5_generic.o
+obj-$(CONFIG_CRYPTO_CAST6) += cast6_generic.o
obj-$(CONFIG_CRYPTO_ARC4) += arc4.o
obj-$(CONFIG_CRYPTO_TEA) += tea.o
obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
@@ -82,6 +82,7 @@ obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
obj-$(CONFIG_CRYPTO_LZO) += lzo.o
+obj-$(CONFIG_CRYPTO_842) += 842.o
obj-$(CONFIG_CRYPTO_RNG2) += rng.o
obj-$(CONFIG_CRYPTO_RNG2) += krng.o
obj-$(CONFIG_CRYPTO_ANSI_CPRNG) += ansi_cprng.o
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index a68c73dae15a..47f2e5c71759 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -1448,7 +1448,6 @@ static struct crypto_alg aes_alg = {
.cra_ctxsize = sizeof(struct crypto_aes_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = AES_MIN_KEY_SIZE,
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index 6ddd99e6114b..c0bb3778f1ae 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -382,26 +382,6 @@ static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
return 0;
}
-static struct crypto_alg rng_alg = {
- .cra_name = "stdrng",
- .cra_driver_name = "ansi_cprng",
- .cra_priority = 100,
- .cra_flags = CRYPTO_ALG_TYPE_RNG,
- .cra_ctxsize = sizeof(struct prng_context),
- .cra_type = &crypto_rng_type,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(rng_alg.cra_list),
- .cra_init = cprng_init,
- .cra_exit = cprng_exit,
- .cra_u = {
- .rng = {
- .rng_make_random = cprng_get_random,
- .rng_reset = cprng_reset,
- .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
- }
- }
-};
-
#ifdef CONFIG_CRYPTO_FIPS
static int fips_cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
unsigned int dlen)
@@ -438,8 +418,27 @@ static int fips_cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
out:
return rc;
}
+#endif
-static struct crypto_alg fips_rng_alg = {
+static struct crypto_alg rng_algs[] = { {
+ .cra_name = "stdrng",
+ .cra_driver_name = "ansi_cprng",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_RNG,
+ .cra_ctxsize = sizeof(struct prng_context),
+ .cra_type = &crypto_rng_type,
+ .cra_module = THIS_MODULE,
+ .cra_init = cprng_init,
+ .cra_exit = cprng_exit,
+ .cra_u = {
+ .rng = {
+ .rng_make_random = cprng_get_random,
+ .rng_reset = cprng_reset,
+ .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
+ }
+ }
+#ifdef CONFIG_CRYPTO_FIPS
+}, {
.cra_name = "fips(ansi_cprng)",
.cra_driver_name = "fips_ansi_cprng",
.cra_priority = 300,
@@ -447,7 +446,6 @@ static struct crypto_alg fips_rng_alg = {
.cra_ctxsize = sizeof(struct prng_context),
.cra_type = &crypto_rng_type,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(rng_alg.cra_list),
.cra_init = cprng_init,
.cra_exit = cprng_exit,
.cra_u = {
@@ -457,33 +455,18 @@ static struct crypto_alg fips_rng_alg = {
.seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
}
}
-};
#endif
+} };
/* Module initalization */
static int __init prng_mod_init(void)
{
- int rc = 0;
-
- rc = crypto_register_alg(&rng_alg);
-#ifdef CONFIG_CRYPTO_FIPS
- if (rc)
- goto out;
-
- rc = crypto_register_alg(&fips_rng_alg);
-
-out:
-#endif
- return rc;
+ return crypto_register_algs(rng_algs, ARRAY_SIZE(rng_algs));
}
static void __exit prng_mod_fini(void)
{
- crypto_unregister_alg(&rng_alg);
-#ifdef CONFIG_CRYPTO_FIPS
- crypto_unregister_alg(&fips_rng_alg);
-#endif
- return;
+ crypto_unregister_algs(rng_algs, ARRAY_SIZE(rng_algs));
}
MODULE_LICENSE("GPL");
diff --git a/crypto/anubis.c b/crypto/anubis.c
index 77530d571c96..008c8a4fb67c 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -678,7 +678,6 @@ static struct crypto_alg anubis_alg = {
.cra_ctxsize = sizeof (struct anubis_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(anubis_alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = ANUBIS_MIN_KEY_SIZE,
.cia_max_keysize = ANUBIS_MAX_KEY_SIZE,
diff --git a/crypto/blowfish_generic.c b/crypto/blowfish_generic.c
index 6f269b5cfa3b..8baf5447d35b 100644
--- a/crypto/blowfish_generic.c
+++ b/crypto/blowfish_generic.c
@@ -115,7 +115,6 @@ static struct crypto_alg alg = {
.cra_ctxsize = sizeof(struct bf_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
.cra_u = { .cipher = {
.cia_min_keysize = BF_MIN_KEY_SIZE,
.cia_max_keysize = BF_MAX_KEY_SIZE,
diff --git a/crypto/camellia_generic.c b/crypto/camellia_generic.c
index f7aaaaf86982..75efa2052305 100644
--- a/crypto/camellia_generic.c
+++ b/crypto/camellia_generic.c
@@ -1072,7 +1072,6 @@ static struct crypto_alg camellia_alg = {
.cra_ctxsize = sizeof(struct camellia_ctx),
.cra_alignmask = 3,
.cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(camellia_alg.cra_list),
.cra_u = {
.cipher = {
.cia_min_keysize = CAMELLIA_MIN_KEY_SIZE,
diff --git a/crypto/cast5.c b/crypto/cast5_generic.c
index 4a230ddec877..bc525dbd8a4b 100644
--- a/crypto/cast5.c
+++ b/crypto/cast5_generic.c
@@ -4,8 +4,8 @@
* Derived from GnuPG implementation of cast5.
*
* Major Changes.
-* Complete conformance to rfc2144.
-* Supports key size from 40 to 128 bits.
+* Complete conformance to rfc2144.
+* Supports key size from 40 to 128 bits.
*
* Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
* Copyright (C) 2003 Kartikey Mahendra Bhatt <kartik_me@hotmail.com>.
@@ -28,19 +28,10 @@
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>
+#include <crypto/cast5.h>
-#define CAST5_BLOCK_SIZE 8
-#define CAST5_MIN_KEY_SIZE 5
-#define CAST5_MAX_KEY_SIZE 16
-struct cast5_ctx {
- u32 Km[16];
- u8 Kr[16];
- int rr; /* rr?number of rounds = 16:number of rounds = 12; (rfc 2144) */
-};
-
-
-static const u32 s1[256] = {
+const u32 cast5_s1[256] = {
0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f,
0x9c004dd3, 0x6003e540, 0xcf9fc949,
0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0,
@@ -106,7 +97,8 @@ static const u32 s1[256] = {
0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c,
0x5ac9f049, 0xdd8f0f00, 0x5c8165bf
};
-static const u32 s2[256] = {
+EXPORT_SYMBOL_GPL(cast5_s1);
+const u32 cast5_s2[256] = {
0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a,
0xeec5207a, 0x55889c94, 0x72fc0651,
0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef,
@@ -172,7 +164,8 @@ static const u32 s2[256] = {
0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539,
0x73bfbe70, 0x83877605, 0x4523ecf1
};
-static const u32 s3[256] = {
+EXPORT_SYMBOL_GPL(cast5_s2);
+const u32 cast5_s3[256] = {
0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff,
0x369fe44b, 0x8c1fc644, 0xaececa90,
0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806,
@@ -238,7 +231,8 @@ static const u32 s3[256] = {
0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636,
0xa133c501, 0xe9d3531c, 0xee353783
};
-static const u32 s4[256] = {
+EXPORT_SYMBOL_GPL(cast5_s3);
+const u32 cast5_s4[256] = {
0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb,
0x64ad8c57, 0x85510443, 0xfa020ed1,
0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43,
@@ -304,6 +298,7 @@ static const u32 s4[256] = {
0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0,
0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2
};
+EXPORT_SYMBOL_GPL(cast5_s4);
static const u32 s5[256] = {
0x7ec90c04, 0x2c6e74b9, 0x9b0e66df, 0xa6337911, 0xb86a7fff,
0x1dd358f5, 0x44dd9d44, 0x1731167f,
@@ -569,17 +564,21 @@ static const u32 sb8[256] = {
0xeaee6801, 0x8db2a283, 0xea8bf59e
};
+#define s1 cast5_s1
+#define s2 cast5_s2
+#define s3 cast5_s3
+#define s4 cast5_s4
+
#define F1(D, m, r) ((I = ((m) + (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
+ (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
#define F2(D, m, r) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
+ (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
#define F3(D, m, r) ((I = ((m) - (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
+ (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
-static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+void __cast5_encrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf)
{
- struct cast5_ctx *c = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 l, r, t;
@@ -628,10 +627,15 @@ static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
dst[0] = cpu_to_be32(r);
dst[1] = cpu_to_be32(l);
}
+EXPORT_SYMBOL_GPL(__cast5_encrypt);
-static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast5_encrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
+
+void __cast5_decrypt(struct cast5_ctx *c, u8 *outbuf, const u8 *inbuf)
{
- struct cast5_ctx *c = crypto_tfm_ctx(tfm);
const __be32 *src = (const __be32 *)inbuf;
__be32 *dst = (__be32 *)outbuf;
u32 l, r, t;
@@ -667,6 +671,12 @@ static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
dst[0] = cpu_to_be32(r);
dst[1] = cpu_to_be32(l);
}
+EXPORT_SYMBOL_GPL(__cast5_decrypt);
+
+static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+ __cast5_decrypt(crypto_tfm_ctx(tfm), outbuf, inbuf);
+}
static void key_schedule(u32 *x, u32 *z, u32 *k)
{
@@ -743,7 +753,7 @@ static void key_schedule(u32 *x, u32 *z, u32 *k)
}
-static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len)
+int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
{
struct cast5_ctx *c = crypto_tfm_ctx(tfm);
int i;
@@ -771,20 +781,22 @@ static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len)
c->Kr[i] = k[i] & 0x1f;
return 0;
}
+EXPORT_SYMBOL_GPL(cast5_setkey);
static struct crypto_alg alg = {
- .cra_name = "cast5",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = CAST5_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct cast5_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
- .cra_u = {
+ .cra_name = "cast5",
+ .cra_driver_name = "cast5-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = CAST5_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct cast5_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_u = {
.cipher = {
.cia_min_keysize = CAST5_MIN_KEY_SIZE,
.cia_max_keysize = CAST5_MAX_KEY_SIZE,
- .cia_setkey = cast5_setkey,
+ .cia_setkey = cast5_setkey,
.cia_encrypt = cast5_encrypt,
.cia_decrypt = cast5_decrypt
}
@@ -806,4 +818,4 @@ module_exit(cast5_mod_fini);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cast5 Cipher Algorithm");
-
+MODULE_ALIAS("cast5");
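The rename to cast5_generic.c and the new exports exist so that the x86_64/AVX module added in this merge can reuse the generic key schedule and fall back to the generic single-block routines while operating directly on its own struct cast5_ctx. A rough illustration of that calling convention (the function below is an illustrative stand-in, not the actual cast5-avx glue code):

    #include <crypto/cast5.h>

    /* Illustrative block-at-a-time fallback using the exported helper. */
    static void example_cast5_ecb_encrypt(struct cast5_ctx *ctx, u8 *dst,
                                          const u8 *src, unsigned int nbytes)
    {
        /* CAST5_BLOCK_SIZE is 8 bytes, as defined in <crypto/cast5.h> */
        while (nbytes >= CAST5_BLOCK_SIZE) {
            __cast5_encrypt(ctx, dst, src);
            src += CAST5_BLOCK_SIZE;
            dst += CAST5_BLOCK_SIZE;
            nbytes -= CAST5_BLOCK_SIZE;
        }
    }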
diff --git a/crypto/cast6.c b/crypto/cast6_generic.c
index e0c15a6c7c34..1acd2f1c48fc 100644
--- a/crypto/cast6.c
+++ b/crypto/cast6_generic.c
@@ -25,24 +25,21 @@
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>
+#include <crypto/cast6.h>
-#define CAST6_BLOCK_SIZE 16
-#define CAST6_MIN_KEY_SIZE 16
-#define CAST6_MAX_KEY_SIZE 32
-
-struct cast6_ctx {
- u32 Km[12][4];
- u8 Kr[12][4];
-};
+#define s1 cast6_s1
+#define s2 cast6_s2
+#define s3 cast6_s3
+#define s4 cast6_s4
#define F1(D, r, m) ((I = ((m) + (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
+ (((s1[I >> 24] ^ s2[(I>>16)&0xff]) - s3[(I>>8)&0xff]) + s4[I&0xff]))
#define F2(D, r, m) ((I = ((m) ^ (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
+ (((s1[I >> 24] - s2[(I>>16)&0xff]) + s3[(I>>8)&0xff]) ^ s4[I&0xff]))
#define F3(D, r, m) ((I = ((m) - (D))), (I = rol32(I, (r))), \
- (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
+ (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]))
-static const u32 s1[256] = {
+const u32 cast6_s1[256] = {
0x30fb40d4, 0x9fa0ff0b, 0x6beccd2f, 0x3f258c7a, 0x1e213f2f,
0x9c004dd3, 0x6003e540, 0xcf9fc949,
0xbfd4af27, 0x88bbbdb5, 0xe2034090, 0x98d09675, 0x6e63a0e0,
@@ -108,8 +105,9 @@ static const u32 s1[256] = {
0x1a69e783, 0x02cc4843, 0xa2f7c579, 0x429ef47d, 0x427b169c,
0x5ac9f049, 0xdd8f0f00, 0x5c8165bf
};
+EXPORT_SYMBOL_GPL(cast6_s1);
-static const u32 s2[256] = {
+const u32 cast6_s2[256] = {
0x1f201094, 0xef0ba75b, 0x69e3cf7e, 0x393f4380, 0xfe61cf7a,
0xeec5207a, 0x55889c94, 0x72fc0651,
0xada7ef79, 0x4e1d7235, 0xd55a63ce, 0xde0436ba, 0x99c430ef,
@@ -175,8 +173,9 @@ static const u32 s2[256] = {
0x43d79572, 0x7e6dd07c, 0x06dfdf1e, 0x6c6cc4ef, 0x7160a539,
0x73bfbe70, 0x83877605, 0x4523ecf1
};
+EXPORT_SYMBOL_GPL(cast6_s2);
-static const u32 s3[256] = {
+const u32 cast6_s3[256] = {
0x8defc240, 0x25fa5d9f, 0xeb903dbf, 0xe810c907, 0x47607fff,
0x369fe44b, 0x8c1fc644, 0xaececa90,
0xbeb1f9bf, 0xeefbcaea, 0xe8cf1950, 0x51df07ae, 0x920e8806,
@@ -242,8 +241,9 @@ static const u32 s3[256] = {
0xf7baefd5, 0x4142ed9c, 0xa4315c11, 0x83323ec5, 0xdfef4636,
0xa133c501, 0xe9d3531c, 0xee353783
};
+EXPORT_SYMBOL_GPL(cast6_s3);
-static const u32 s4[256] = {
+const u32 cast6_s4[256] = {
0x9db30420, 0x1fb6e9de, 0xa7be7bef, 0xd273a298, 0x4a4f7bdb,
0x64ad8c57, 0x85510443, 0xfa020ed1,
0x7e287aff, 0xe60fb663, 0x095f35a1, 0x79ebf120, 0xfd059d43,
@@ -309,6 +309,7 @@ static const u32 s4[256] = {
0x7ae5290c, 0x3cb9536b, 0x851e20fe, 0x9833557e, 0x13ecf0b0,
0xd3ffb372, 0x3f85c5c1, 0x0aef7ed2
};
+EXPORT_SYMBOL_GPL(cast6_s4);
static const u32 Tm[24][8] = {
{ 0x5a827999, 0xc95c653a, 0x383650db, 0xa7103c7c, 0x15ea281d,
@@ -369,7 +370,7 @@ static const u8 Tr[4][8] = {
};
/* forward octave */
-static void W(u32 *key, unsigned int i)
+static inline void W(u32 *key, unsigned int i)
{
u32 I;
key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]);
@@ -382,14 +383,12 @@ static void W(u32 *key, unsigned int i)
key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
}
-static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
- unsigned key_len)
+int __cast6_setkey(struct cast6_ctx *c, const u8 *in_key,
+ unsigned key_len, u32 *flags)
{
int i;
u32 key[8];
__be32 p_key[8]; /* p