Commit e14e61e9 authored by Linus Torvalds

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (57 commits)
  crypto: aes - Precompute tables
  crypto: talitos - Ack done interrupt in isr instead of tasklet
  crypto: testmgr - Correct comment about deflate parameters
  crypto: salsa20 - Remove private wrappers around various operations
  crypto: des3_ede - permit weak keys unless REQ_WEAK_KEY set
  crypto: sha512 - Switch to shash 
  crypto: sha512 - Move message schedule W[80] to static percpu area
  crypto: michael_mic - Switch to shash
  crypto: wp512 - Switch to shash
  crypto: tgr192 - Switch to shash
  crypto: sha256 - Switch to shash
  crypto: md5 - Switch to shash
  crypto: md4 - Switch to shash
  crypto: sha1 - Switch to shash
  crypto: rmd320 - Switch to shash
  crypto: rmd256 - Switch to shash
  crypto: rmd160 - Switch to shash
  crypto: rmd128 - Switch to shash
  crypto: null - Switch to shash
  crypto: hash - Make setkey optional
  ...
parents cb10ea54 0ee4a969
...@@ -6,13 +6,22 @@ ...@@ -6,13 +6,22 @@
* Intel(R) 64 and IA-32 Architectures Software Developer's Manual * Intel(R) 64 and IA-32 Architectures Software Developer's Manual
* Volume 2A: Instruction Set Reference, A-M * Volume 2A: Instruction Set Reference, A-M
* *
* Copyright (c) 2008 Austin Zhang <austin_zhang@linux.intel.com> * Copyright (C) 2008 Intel Corporation
* Copyright (c) 2008 Kent Liu <kent.liu@intel.com> * Authors: Austin Zhang <austin_zhang@linux.intel.com>
* Kent Liu <kent.liu@intel.com>
* *
* This program is free software; you can redistribute it and/or modify it * This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free * under the terms and conditions of the GNU General Public License,
* Software Foundation; either version 2 of the License, or (at your option) * version 2, as published by the Free Software Foundation.
* any later version. *
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
* *
*/ */
#include <linux/init.h> #include <linux/init.h>
...@@ -75,99 +84,92 @@ static u32 __pure crc32c_intel_le_hw(u32 crc, unsigned char const *p, size_t len ...@@ -75,99 +84,92 @@ static u32 __pure crc32c_intel_le_hw(u32 crc, unsigned char const *p, size_t len
* If your algorithm starts with ~0, then XOR with ~0 before you set * If your algorithm starts with ~0, then XOR with ~0 before you set
* the seed. * the seed.
*/ */
static int crc32c_intel_setkey(struct crypto_ahash *hash, const u8 *key, static int crc32c_intel_setkey(struct crypto_shash *hash, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
u32 *mctx = crypto_ahash_ctx(hash); u32 *mctx = crypto_shash_ctx(hash);
if (keylen != sizeof(u32)) { if (keylen != sizeof(u32)) {
crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN); crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
return -EINVAL; return -EINVAL;
} }
*mctx = le32_to_cpup((__le32 *)key); *mctx = le32_to_cpup((__le32 *)key);
return 0; return 0;
} }
static int crc32c_intel_init(struct ahash_request *req) static int crc32c_intel_init(struct shash_desc *desc)
{ {
u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); u32 *mctx = crypto_shash_ctx(desc->tfm);
u32 *crcp = ahash_request_ctx(req); u32 *crcp = shash_desc_ctx(desc);
*crcp = *mctx; *crcp = *mctx;
return 0; return 0;
} }
static int crc32c_intel_update(struct ahash_request *req) static int crc32c_intel_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{ {
struct crypto_hash_walk walk; u32 *crcp = shash_desc_ctx(desc);
u32 *crcp = ahash_request_ctx(req);
u32 crc = *crcp;
int nbytes;
for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
nbytes = crypto_hash_walk_done(&walk, 0))
crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
*crcp = crc; *crcp = crc32c_intel_le_hw(*crcp, data, len);
return 0; return 0;
} }
static int crc32c_intel_final(struct ahash_request *req) static int __crc32c_intel_finup(u32 *crcp, const u8 *data, unsigned int len,
u8 *out)
{ {
u32 *crcp = ahash_request_ctx(req); *(__le32 *)out = ~cpu_to_le32(crc32c_intel_le_hw(*crcp, data, len));
*(__le32 *)req->result = ~cpu_to_le32p(crcp);
return 0; return 0;
} }
static int crc32c_intel_digest(struct ahash_request *req) static int crc32c_intel_finup(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{ {
struct crypto_hash_walk walk; return __crc32c_intel_finup(shash_desc_ctx(desc), data, len, out);
u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); }
u32 crc = *mctx;
int nbytes;
for (nbytes = crypto_hash_walk_first(req, &walk); nbytes; static int crc32c_intel_final(struct shash_desc *desc, u8 *out)
nbytes = crypto_hash_walk_done(&walk, 0)) {
crc = crc32c_intel_le_hw(crc, walk.data, nbytes); u32 *crcp = shash_desc_ctx(desc);
*(__le32 *)req->result = ~cpu_to_le32(crc); *(__le32 *)out = ~cpu_to_le32p(crcp);
return 0; return 0;
} }
static int crc32c_intel_digest(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
return __crc32c_intel_finup(crypto_shash_ctx(desc->tfm), data, len,
out);
}
static int crc32c_intel_cra_init(struct crypto_tfm *tfm) static int crc32c_intel_cra_init(struct crypto_tfm *tfm)
{ {
u32 *key = crypto_tfm_ctx(tfm); u32 *key = crypto_tfm_ctx(tfm);
*key = ~0; *key = ~0;
tfm->crt_ahash.reqsize = sizeof(u32);
return 0; return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "crc32c", .setkey = crc32c_intel_setkey,
.cra_driver_name = "crc32c-intel", .init = crc32c_intel_init,
.cra_priority = 200, .update = crc32c_intel_update,
.cra_flags = CRYPTO_ALG_TYPE_AHASH, .final = crc32c_intel_final,
.cra_blocksize = CHKSUM_BLOCK_SIZE, .finup = crc32c_intel_finup,
.cra_alignmask = 3, .digest = crc32c_intel_digest,
.cra_ctxsize = sizeof(u32), .descsize = sizeof(u32),
.cra_module = THIS_MODULE, .digestsize = CHKSUM_DIGEST_SIZE,
.cra_list = LIST_HEAD_INIT(alg.cra_list), .base = {
.cra_init = crc32c_intel_cra_init, .cra_name = "crc32c",
.cra_type = &crypto_ahash_type, .cra_driver_name = "crc32c-intel",
.cra_u = { .cra_priority = 200,
.ahash = { .cra_blocksize = CHKSUM_BLOCK_SIZE,
.digestsize = CHKSUM_DIGEST_SIZE, .cra_ctxsize = sizeof(u32),
.setkey = crc32c_intel_setkey, .cra_module = THIS_MODULE,
.init = crc32c_intel_init, .cra_init = crc32c_intel_cra_init,
.update = crc32c_intel_update,
.final = crc32c_intel_final,
.digest = crc32c_intel_digest,
}
} }
}; };
...@@ -175,14 +177,14 @@ static struct crypto_alg alg = { ...@@ -175,14 +177,14 @@ static struct crypto_alg alg = {
static int __init crc32c_intel_mod_init(void) static int __init crc32c_intel_mod_init(void)
{ {
if (cpu_has_xmm4_2) if (cpu_has_xmm4_2)
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
else else
return -ENODEV; return -ENODEV;
} }
static void __exit crc32c_intel_mod_fini(void) static void __exit crc32c_intel_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(crc32c_intel_mod_init); module_init(crc32c_intel_mod_init);
...@@ -194,4 +196,3 @@ MODULE_LICENSE("GPL"); ...@@ -194,4 +196,3 @@ MODULE_LICENSE("GPL");
MODULE_ALIAS("crc32c"); MODULE_ALIAS("crc32c");
MODULE_ALIAS("crc32c-intel"); MODULE_ALIAS("crc32c-intel");
...@@ -102,6 +102,7 @@ config CRYPTO_NULL ...@@ -102,6 +102,7 @@ config CRYPTO_NULL
tristate "Null algorithms" tristate "Null algorithms"
select CRYPTO_ALGAPI select CRYPTO_ALGAPI
select CRYPTO_BLKCIPHER select CRYPTO_BLKCIPHER
select CRYPTO_HASH
help help
These are 'Null' algorithms, used by IPsec, which do nothing. These are 'Null' algorithms, used by IPsec, which do nothing.
...@@ -256,12 +257,10 @@ comment "Digest" ...@@ -256,12 +257,10 @@ comment "Digest"
config CRYPTO_CRC32C config CRYPTO_CRC32C
tristate "CRC32c CRC algorithm" tristate "CRC32c CRC algorithm"
select CRYPTO_HASH select CRYPTO_HASH
select LIBCRC32C
help help
Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
by iSCSI for header and data digests and by others. by iSCSI for header and data digests and by others.
See Castagnoli93. This implementation uses lib/libcrc32c. See Castagnoli93. Module will be crc32c.
Module will be crc32c.
config CRYPTO_CRC32C_INTEL config CRYPTO_CRC32C_INTEL
tristate "CRC32c INTEL hardware acceleration" tristate "CRC32c INTEL hardware acceleration"
...@@ -277,19 +276,19 @@ config CRYPTO_CRC32C_INTEL ...@@ -277,19 +276,19 @@ config CRYPTO_CRC32C_INTEL
config CRYPTO_MD4 config CRYPTO_MD4
tristate "MD4 digest algorithm" tristate "MD4 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
MD4 message digest algorithm (RFC1320). MD4 message digest algorithm (RFC1320).
config CRYPTO_MD5 config CRYPTO_MD5
tristate "MD5 digest algorithm" tristate "MD5 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
MD5 message digest algorithm (RFC1321). MD5 message digest algorithm (RFC1321).
config CRYPTO_MICHAEL_MIC config CRYPTO_MICHAEL_MIC
tristate "Michael MIC keyed digest algorithm" tristate "Michael MIC keyed digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
Michael MIC is used for message integrity protection in TKIP Michael MIC is used for message integrity protection in TKIP
(IEEE 802.11i). This algorithm is required for TKIP, but it (IEEE 802.11i). This algorithm is required for TKIP, but it
...@@ -298,7 +297,7 @@ config CRYPTO_MICHAEL_MIC ...@@ -298,7 +297,7 @@ config CRYPTO_MICHAEL_MIC
config CRYPTO_RMD128 config CRYPTO_RMD128
tristate "RIPEMD-128 digest algorithm" tristate "RIPEMD-128 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
RIPEMD-128 (ISO/IEC 10118-3:2004). RIPEMD-128 (ISO/IEC 10118-3:2004).
...@@ -311,7 +310,7 @@ config CRYPTO_RMD128 ...@@ -311,7 +310,7 @@ config CRYPTO_RMD128
config CRYPTO_RMD160 config CRYPTO_RMD160
tristate "RIPEMD-160 digest algorithm" tristate "RIPEMD-160 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
RIPEMD-160 (ISO/IEC 10118-3:2004). RIPEMD-160 (ISO/IEC 10118-3:2004).
...@@ -328,7 +327,7 @@ config CRYPTO_RMD160 ...@@ -328,7 +327,7 @@ config CRYPTO_RMD160
config CRYPTO_RMD256 config CRYPTO_RMD256
tristate "RIPEMD-256 digest algorithm" tristate "RIPEMD-256 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
RIPEMD-256 is an optional extension of RIPEMD-128 with a RIPEMD-256 is an optional extension of RIPEMD-128 with a
256 bit hash. It is intended for applications that require 256 bit hash. It is intended for applications that require
...@@ -340,7 +339,7 @@ config CRYPTO_RMD256 ...@@ -340,7 +339,7 @@ config CRYPTO_RMD256
config CRYPTO_RMD320 config CRYPTO_RMD320
tristate "RIPEMD-320 digest algorithm" tristate "RIPEMD-320 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
RIPEMD-320 is an optional extension of RIPEMD-160 with a RIPEMD-320 is an optional extension of RIPEMD-160 with a
320 bit hash. It is intended for applications that require 320 bit hash. It is intended for applications that require
...@@ -352,13 +351,13 @@ config CRYPTO_RMD320 ...@@ -352,13 +351,13 @@ config CRYPTO_RMD320
config CRYPTO_SHA1 config CRYPTO_SHA1
tristate "SHA1 digest algorithm" tristate "SHA1 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
config CRYPTO_SHA256 config CRYPTO_SHA256
tristate "SHA224 and SHA256 digest algorithm" tristate "SHA224 and SHA256 digest algorithm"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
SHA256 secure hash standard (DFIPS 180-2). SHA256 secure hash standard (DFIPS 180-2).
...@@ -370,7 +369,7 @@ config CRYPTO_SHA256 ...@@ -370,7 +369,7 @@ config CRYPTO_SHA256
config CRYPTO_SHA512 config CRYPTO_SHA512
tristate "SHA384 and SHA512 digest algorithms" tristate "SHA384 and SHA512 digest algorithms"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
SHA512 secure hash standard (DFIPS 180-2). SHA512 secure hash standard (DFIPS 180-2).
...@@ -382,7 +381,7 @@ config CRYPTO_SHA512 ...@@ -382,7 +381,7 @@ config CRYPTO_SHA512
config CRYPTO_TGR192 config CRYPTO_TGR192
tristate "Tiger digest algorithms" tristate "Tiger digest algorithms"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
Tiger hash algorithm 192, 160 and 128-bit hashes Tiger hash algorithm 192, 160 and 128-bit hashes
...@@ -395,7 +394,7 @@ config CRYPTO_TGR192 ...@@ -395,7 +394,7 @@ config CRYPTO_TGR192
config CRYPTO_WP512 config CRYPTO_WP512
tristate "Whirlpool digest algorithms" tristate "Whirlpool digest algorithms"
select CRYPTO_ALGAPI select CRYPTO_HASH
help help
Whirlpool hash algorithm 512, 384 and 256-bit hashes Whirlpool hash algorithm 512, 384 and 256-bit hashes
......
...@@ -22,6 +22,7 @@ obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o ...@@ -22,6 +22,7 @@ obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
crypto_hash-objs := hash.o crypto_hash-objs := hash.o
crypto_hash-objs += ahash.o crypto_hash-objs += ahash.o
crypto_hash-objs += shash.o
obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
cryptomgr-objs := algboss.o testmgr.o cryptomgr-objs := algboss.o testmgr.o
......
...@@ -112,6 +112,22 @@ int crypto_hash_walk_first(struct ahash_request *req, ...@@ -112,6 +112,22 @@ int crypto_hash_walk_first(struct ahash_request *req,
} }
EXPORT_SYMBOL_GPL(crypto_hash_walk_first); EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
struct crypto_hash_walk *walk,
struct scatterlist *sg, unsigned int len)
{
walk->total = len;
if (!walk->total)
return 0;
walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
walk->sg = sg;
walk->flags = hdesc->flags;
return hash_walk_new_entry(walk);
}
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
...@@ -146,6 +162,26 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, ...@@ -146,6 +162,26 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
return ahash->setkey(tfm, key, keylen); return ahash->setkey(tfm, key, keylen);
} }
static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
unsigned int keylen)
{
return -ENOSYS;
}
int crypto_ahash_import(struct ahash_request *req, const u8 *in)
{
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
struct ahash_alg *alg = crypto_ahash_alg(tfm);
memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
if (alg->reinit)
alg->reinit(req);
return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);
static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type, static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
u32 mask) u32 mask)
{ {
...@@ -164,7 +200,7 @@ static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) ...@@ -164,7 +200,7 @@ static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
crt->update = alg->update; crt->update = alg->update;
crt->final = alg->final; crt->final = alg->final;
crt->digest = alg->digest; crt->digest = alg->digest;
crt->setkey = ahash_setkey; crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
crt->digestsize = alg->digestsize; crt->digestsize = alg->digestsize;
return 0; return 0;
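
A brief hedged illustration (not from the patch) of what the optional-setkey change means for callers of the ahash API; try_optional_setkey is a hypothetical name.

/* An ahash algorithm that leaves .setkey NULL now gets ahash_nosetkey
 * wired into crt->setkey, so a caller that passes a key anyway gets a
 * clean error instead of a NULL function-pointer dereference. */
static int try_optional_setkey(struct crypto_ahash *tfm,
			       const u8 *key, unsigned int keylen)
{
	int err = crypto_ahash_setkey(tfm, key, keylen);

	/* err is -ENOSYS for unkeyed algorithms, 0 or a real error
	 * (e.g. -EINVAL for a bad key length) for keyed ones */
	return err;
}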
......
...@@ -161,7 +161,7 @@ static int _get_more_prng_bytes(struct prng_context *ctx) ...@@ -161,7 +161,7 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
/* /*
* Now update our DT value * Now update our DT value
*/ */
for (i = 0; i < DEFAULT_BLK_SZ; i++) { for (i = DEFAULT_BLK_SZ - 1; i >= 0; i--) {
ctx->DT[i] += 1; ctx->DT[i] += 1;
if (ctx->DT[i] != 0) if (ctx->DT[i] != 0)
break; break;
...@@ -223,9 +223,10 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -223,9 +223,10 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
} }
/* /*
* Copy up to the next whole block size * Copy any data less than an entire block
*/ */
if (byte_count < DEFAULT_BLK_SZ) { if (byte_count < DEFAULT_BLK_SZ) {
empty_rbuf:
for (; ctx->rand_data_valid < DEFAULT_BLK_SZ; for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
ctx->rand_data_valid++) { ctx->rand_data_valid++) {
*ptr = ctx->rand_data[ctx->rand_data_valid]; *ptr = ctx->rand_data[ctx->rand_data_valid];
...@@ -240,18 +241,22 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx) ...@@ -240,18 +241,22 @@ static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx)
* Now copy whole blocks * Now copy whole blocks
*/ */
for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) { for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) {
if (_get_more_prng_bytes(ctx) < 0) { if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
memset(buf, 0, nbytes); if (_get_more_prng_bytes(ctx) < 0) {
err = -EINVAL; memset(buf, 0, nbytes);
goto done; err = -EINVAL;
goto done;
}
} }
if (ctx->rand_data_valid > 0)
goto empty_rbuf;
memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ); memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ);
ctx->rand_data_valid += DEFAULT_BLK_SZ; ctx->rand_data_valid += DEFAULT_BLK_SZ;
ptr += DEFAULT_BLK_SZ; ptr += DEFAULT_BLK_SZ;
} }
/* /*
* Now copy any extra partial data * Now go back and get any remaining partial block
*/ */
if (byte_count) if (byte_count)
goto remainder; goto remainder;
...@@ -349,15 +354,25 @@ static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata, ...@@ -349,15 +354,25 @@ static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
return get_prng_bytes(rdata, dlen, prng); return get_prng_bytes(rdata, dlen, prng);
} }
/*
* This is the cprng's registered reset method; the seed value is
* interpreted as the tuple { V, KEY, DT }.
* V and KEY are required during reset, and DT is optional, detected
* as being present by testing the length of the seed.
*/
static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
{ {
struct prng_context *prng = crypto_rng_ctx(tfm); struct prng_context *prng = crypto_rng_ctx(tfm);
u8 *key = seed + DEFAULT_PRNG_KSZ; u8 *key = seed + DEFAULT_BLK_SZ;
u8 *dt = NULL;
if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ) if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ)
return -EINVAL; return -EINVAL;
reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, NULL); if (slen >= (2 * DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ))
dt = key + DEFAULT_PRNG_KSZ;
reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, dt);
if (prng->flags & PRNG_NEED_RESET) if (prng->flags & PRNG_NEED_RESET)
return -EINVAL; return -EINVAL;
...@@ -379,7 +394,7 @@ static struct crypto_alg rng_alg = { ...@@ -379,7 +394,7 @@ static struct crypto_alg rng_alg = {
.rng = { .rng = {
.rng_make_random = cprng_get_random, .rng_make_random = cprng_get_random,
.rng_reset = cprng_reset, .rng_reset = cprng_reset,
.seedsize = DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ, .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
} }
} }
}; };
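
To make the { V, KEY, DT } seed layout described in the new cprng_reset() comment concrete, here is a minimal seeding sketch (not from the patch); seed_example_cprng is an illustrative name and no validation is shown.

static int seed_example_cprng(struct crypto_rng *rng, const u8 *V,
			      const u8 *key, const u8 *DT)
{
	/* layout expected by cprng_reset(): V, then KEY, then optional DT */
	u8 seed[2 * DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ];

	memcpy(seed, V, DEFAULT_BLK_SZ);
	memcpy(seed + DEFAULT_BLK_SZ, key, DEFAULT_PRNG_KSZ);
	memcpy(seed + DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ, DT, DEFAULT_BLK_SZ);

	/* passing only DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ bytes would take
	 * the "no DT supplied" path that this patch adds */
	return crypto_rng_reset(rng, seed, sizeof(seed));
}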
......
...@@ -300,8 +300,8 @@ static void crypto_exit_ops(struct crypto_tfm *tfm) ...@@ -300,8 +300,8 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
const struct crypto_type *type = tfm->__crt_alg->cra_type; const struct crypto_type *type = tfm->__crt_alg->cra_type;
if (type) { if (type) {
if (type->exit) if (tfm->exit)
type->exit(tfm); tfm->exit(tfm);
return; return;
} }
...@@ -379,17 +379,16 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, ...@@ -379,17 +379,16 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
if (err) if (err)
goto out_free_tfm; goto out_free_tfm;
if (alg->cra_init && (err = alg->cra_init(tfm))) { if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
if (err == -EAGAIN)
crypto_shoot_alg(alg);
goto cra_init_failed; goto cra_init_failed;
}
goto out; goto out;
cra_init_failed: cra_init_failed:
crypto_exit_ops(tfm); crypto_exit_ops(tfm);
out_free_tfm: out_free_tfm:
if (err == -EAGAIN)
crypto_shoot_alg(alg);
kfree(tfm); kfree(tfm);
out_err: out_err:
tfm = ERR_PTR(err); tfm = ERR_PTR(err);
...@@ -404,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm); ...@@ -404,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
* @type: Type of algorithm * @type: Type of algorithm
* @mask: Mask for type comparison * @mask: Mask for type comparison
* *
* This function should not be used by new algorithm types.
* Please use crypto_alloc_tfm instead. * Please use crypto_alloc_tfm instead.
*
* crypto_alloc_base() will first attempt to locate an already loaded * crypto_alloc_base() will first attempt to locate an already loaded
* algorithm. If that fails and the kernel supports dynamically loadable * algorithm. If that fails and the kernel supports dynamically loadable
* modules, it will then attempt to load a module of the same name or * modules, it will then attempt to load a module of the same name or
...@@ -450,6 +452,111 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) ...@@ -450,6 +452,111 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
return ERR_PTR(err); return ERR_PTR(err);
} }
EXPORT_SYMBOL_GPL(crypto_alloc_base); EXPORT_SYMBOL_GPL(crypto_alloc_base);
struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
const struct crypto_type *frontend)
{
char *mem;
struct crypto_tfm *tfm = NULL;
unsigned int tfmsize;
unsigned int total;
int err = -ENOMEM;
tfmsize = frontend->tfmsize;
total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
mem = kzalloc(total, GFP_KERNEL);
if (mem == NULL)
goto out_err;
tfm = (struct crypto_tfm *)(mem + tfmsize);
tfm->__crt_alg = alg;
err = frontend->init_tfm(tfm, frontend);
if (err)
goto out_free_tfm;
if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
goto cra_init_failed;
goto out;
cra_init_failed:
crypto_exit_ops(tfm);
out_free_tfm:
if (err == -EAGAIN)
crypto_shoot_alg(alg);
kfree(mem);
out_err:
tfm = ERR_PTR(err);
out:
return tfm;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);
/*
* crypto_alloc_tfm - Locate algorithm and allocate transform
* @alg_name: Name of algorithm
* @frontend: Frontend algorithm type
* @type: Type of algorithm
* @mask: Mask for type comparison
*
* crypto_alloc_tfm() will first attempt to locate an already loaded
* algorithm. If that fails and the kernel supports dynamically loadable
* modules, it will then attempt to load a module of the same name or
* alias. If that fails it will send a query to any loaded crypto manager
* to construct an algorithm on the fly. A refcount is grabbed on the
* algorithm which is then associated with the new transform.
*
* The returned transform is of a non-determinate type. Most people
* should use one of the more specific allocation functions such as
* crypto_alloc_blkcipher.
*
* In case of error the return value is an error pointer.
*/
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
const struct crypto_type *frontend,
u32 type, u32 mask)
{
struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
struct crypto_tfm *tfm;
int err;
type &= frontend->maskclear;
mask &= frontend->maskclear;
type |= frontend->type;
mask |= frontend->maskset;
lookup = frontend->lookup ?: crypto_alg_mod_lookup;
for (;;) {
struct crypto_alg *alg;
alg = lookup(alg_name, type, mask);
if (IS_ERR(alg)) {
err = PTR_ERR(alg);
goto err;
}
tfm = crypto_create_tfm(alg, frontend);
if (!IS_ERR(tfm))
return tfm;
crypto_mod_put(alg);
err = PTR_ERR(tfm);
err:
if (err != -EAGAIN)
break;
if (signal_pending(current)) {
err = -EINTR;
break;
}
}
return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
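
As a rough illustration of the intended calling convention (not code from this patch), a type frontend such as the new shash type resolves its transforms through crypto_alloc_tfm() rather than crypto_alloc_base(); example_frontend below is only a placeholder for a real struct crypto_type.

extern const struct crypto_type example_frontend;	/* placeholder */

static struct crypto_tfm *example_alloc(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;

	/* lookup, module autoload and the -EAGAIN retry loop are handled
	 * inside crypto_alloc_tfm(); the frontend only supplies its
	 * crypto_type so the right init_tfm()/extsize() hooks run */
	tfm = crypto_alloc_tfm(alg_name, &example_frontend, type, mask);
	if (IS_ERR(tfm))
		printk(KERN_ERR "crypto: failed to allocate %s (%ld)\n",
		       alg_name, PTR_ERR(tfm));
	return tfm;
}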
/* /*
* crypto_free_tfm - Free crypto transform * crypto_free_tfm - Free crypto transform
...@@ -469,7 +576,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm) ...@@ -469,7 +576,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
alg = tfm->__crt_alg; alg = tfm->__crt_alg;
size = sizeof(*tfm) + alg->cra_ctxsize; size = sizeof(*tfm) + alg->cra_ctxsize;
if (alg->cra_exit) if (!tfm->exit && alg->cra_exit)
alg->cra_exit(tfm); alg->cra_exit(tfm);
crypto_exit_ops(tfm); crypto_exit_ops(tfm);
crypto_mod_put(alg); crypto_mod_put(alg);
......
...@@ -11,6 +11,7 @@ ...@@ -11,6 +11,7 @@
*/ */
#include <crypto/aead.h> #include <crypto/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <crypto/authenc.h> #include <crypto/authenc.h>
#include <crypto/scatterwalk.h> #include <crypto/scatterwalk.h>
...@@ -431,6 +432,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) ...@@ -431,6 +432,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ? inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
auth->cra_hash.digestsize : auth->cra_hash.digestsize :
auth->cra_type ?
__crypto_shash_alg(auth)->digestsize :
auth->cra_digest.dia_digestsize; auth->cra_digest.dia_digestsize;
inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
......
...@@ -35,6 +35,8 @@ ...@@ -35,6 +35,8 @@
#include <linux/init.h> #include <linux/init.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/bitops.h>
#include <asm/unaligned.h>
static const u32 camellia_sp1110[256] = { static const u32 camellia_sp1110[256] = {
0x70707000,0x82828200,0x2c2c2c00,0xececec00, 0x70707000,0x82828200,0x2c2c2c00,0xececec00,
...@@ -335,20 +337,6 @@ static const u32 camellia_sp4404[256] = { ...@@ -335,20 +337,6 @@ static const u32 camellia_sp4404[256] = {
/* /*
* macros * macros
*/ */
#define GETU32(v, pt) \
do { \
/* latest breed of gcc is clever enough to use move */ \
memcpy(&(v), (pt), 4); \
(v) = be32_to_cpu(v); \
} while(0)
/* rotation right shift 1byte */
#define ROR8(x) (((x) >> 8) + ((x) << 24))
/* rotation left shift 1bit */
#define ROL1(x) (((x) << 1) + ((x) >> 31))
/* rotation left shift 1byte */
#define ROL8(x) (((x) << 8) + ((x) >> 24))
#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \ #define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
do { \ do { \
w0 = ll; \ w0 = ll; \
...@@ -383,7 +371,7 @@ static const u32 camellia_sp4404[256] = { ...@@ -383,7 +371,7 @@ static const u32 camellia_sp4404[256] = {
^ camellia_sp3033[(u8)(il >> 8)] \ ^ camellia_sp3033[(u8)(il >> 8)] \
^ camellia_sp4404[(u8)(il )]; \ ^ camellia_sp4404[(u8)(il )]; \
yl ^= yr; \ yl ^= yr; \
yr = ROR8(yr); \ yr = ror32(yr, 8); \
yr ^= yl; \ yr ^= yl; \
} while(0) } while(0)
...@@ -405,7 +393,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -405,7 +393,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
subL[7] ^= subL[1]; subR[7] ^= subR[1]; subL[7] ^= subL[1]; subR[7] ^= subR[1];
subL[1] ^= subR[1] & ~subR[9]; subL[1] ^= subR[1] & ~subR[9];
dw = subL[1] & subL[9], dw = subL[1] & subL[9],
subR[1] ^= ROL1(dw); /* modified for FLinv(kl2) */ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */
/* round 8 */ /* round 8 */
subL[11] ^= subL[1]; subR[11] ^= subR[1]; subL[11] ^= subL[1]; subR[11] ^= subR[1];
/* round 10 */ /* round 10 */
...@@ -414,7 +402,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -414,7 +402,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
subL[15] ^= subL[1]; subR[15] ^= subR[1]; subL[15] ^= subL[1]; subR[15] ^= subR[1];
subL[1] ^= subR[1] & ~subR[17]; subL[1] ^= subR[1] & ~subR[17];
dw = subL[1] & subL[17], dw = subL[1] & subL[17],
subR[1] ^= ROL1(dw); /* modified for FLinv(kl4) */ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */
/* round 14 */ /* round 14 */
subL[19] ^= subL[1]; subR[19] ^= subR[1]; subL[19] ^= subL[1]; subR[19] ^= subR[1];
/* round 16 */ /* round 16 */
...@@ -430,7 +418,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -430,7 +418,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
} else { } else {
subL[1] ^= subR[1] & ~subR[25]; subL[1] ^= subR[1] & ~subR[25];
dw = subL[1] & subL[25], dw = subL[1] & subL[25],
subR[1] ^= ROL1(dw); /* modified for FLinv(kl6) */ subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */
/* round 20 */ /* round 20 */
subL[27] ^= subL[1]; subR[27] ^= subR[1]; subL[27] ^= subL[1]; subR[27] ^= subR[1];
/* round 22 */ /* round 22 */
...@@ -450,7 +438,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -450,7 +438,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
subL[26] ^= kw4l; subR[26] ^= kw4r; subL[26] ^= kw4l; subR[26] ^= kw4r;
kw4l ^= kw4r & ~subR[24]; kw4l ^= kw4r & ~subR[24];
dw = kw4l & subL[24], dw = kw4l & subL[24],
kw4r ^= ROL1(dw); /* modified for FL(kl5) */ kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */
} }
/* round 17 */ /* round 17 */
subL[22] ^= kw4l; subR[22] ^= kw4r; subL[22] ^= kw4l; subR[22] ^= kw4r;
...@@ -460,7 +448,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -460,7 +448,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
subL[18] ^= kw4l; subR[18] ^= kw4r; subL[18] ^= kw4l; subR[18] ^= kw4r;
kw4l ^= kw4r & ~subR[16]; kw4l ^= kw4r & ~subR[16];
dw = kw4l & subL[16], dw = kw4l & subL[16],
kw4r ^= ROL1(dw); /* modified for FL(kl3) */ kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */
/* round 11 */ /* round 11 */
subL[14] ^= kw4l; subR[14] ^= kw4r; subL[14] ^= kw4l; subR[14] ^= kw4r;
/* round 9 */ /* round 9 */
...@@ -469,7 +457,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -469,7 +457,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
subL[10] ^= kw4l; subR[10] ^= kw4r; subL[10] ^= kw4l; subR[10] ^= kw4r;
kw4l ^= kw4r & ~subR[8]; kw4l ^= kw4r & ~subR[8];
dw = kw4l & subL[8], dw = kw4l & subL[8],
kw4r ^= ROL1(dw); /* modified for FL(kl1) */ kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */
/* round 5 */ /* round 5 */
subL[6] ^= kw4l; subR[6] ^= kw4r; subL[6] ^= kw4l; subR[6] ^= kw4r;
/* round 3 */ /* round 3 */
...@@ -494,7 +482,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -494,7 +482,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(6) = subR[5] ^ subR[7]; SUBKEY_R(6) = subR[5] ^ subR[7];
tl = subL[10] ^ (subR[10] & ~subR[8]); tl = subL[10] ^ (subR[10] & ~subR[8]);
dw = tl & subL[8], /* FL(kl1) */ dw = tl & subL[8], /* FL(kl1) */
tr = subR[10] ^ ROL1(dw); tr = subR[10] ^ rol32(dw, 1);
SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */ SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
SUBKEY_R(7) = subR[6] ^ tr; SUBKEY_R(7) = subR[6] ^ tr;
SUBKEY_L(8) = subL[8]; /* FL(kl1) */ SUBKEY_L(8) = subL[8]; /* FL(kl1) */
...@@ -503,7 +491,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -503,7 +491,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(9) = subR[9]; SUBKEY_R(9) = subR[9];
tl = subL[7] ^ (subR[7] & ~subR[9]); tl = subL[7] ^ (subR[7] & ~subR[9]);
dw = tl & subL[9], /* FLinv(kl2) */ dw = tl & subL[9], /* FLinv(kl2) */
tr = subR[7] ^ ROL1(dw); tr = subR[7] ^ rol32(dw, 1);
SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */ SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
SUBKEY_R(10) = tr ^ subR[11]; SUBKEY_R(10) = tr ^ subR[11];
SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */ SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
...@@ -516,7 +504,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -516,7 +504,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(14) = subR[13] ^ subR[15]; SUBKEY_R(14) = subR[13] ^ subR[15];
tl = subL[18] ^ (subR[18] & ~subR[16]); tl = subL[18] ^ (subR[18] & ~subR[16]);
dw = tl & subL[16], /* FL(kl3) */ dw = tl & subL[16], /* FL(kl3) */
tr = subR[18] ^ ROL1(dw); tr = subR[18] ^ rol32(dw, 1);
SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */ SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
SUBKEY_R(15) = subR[14] ^ tr; SUBKEY_R(15) = subR[14] ^ tr;
SUBKEY_L(16) = subL[16]; /* FL(kl3) */ SUBKEY_L(16) = subL[16]; /* FL(kl3) */
...@@ -525,7 +513,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -525,7 +513,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(17) = subR[17]; SUBKEY_R(17) = subR[17];
tl = subL[15] ^ (subR[15] & ~subR[17]); tl = subL[15] ^ (subR[15] & ~subR[17]);
dw = tl & subL[17], /* FLinv(kl4) */ dw = tl & subL[17], /* FLinv(kl4) */
tr = subR[15] ^ ROL1(dw); tr = subR[15] ^ rol32(dw, 1);
SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */ SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
SUBKEY_R(18) = tr ^ subR[19]; SUBKEY_R(18) = tr ^ subR[19];
SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */ SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
...@@ -544,7 +532,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -544,7 +532,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
} else { } else {
tl = subL[26] ^ (subR[26] & ~subR[24]); tl = subL[26] ^ (subR[26] & ~subR[24]);
dw = tl & subL[24], /* FL(kl5) */ dw = tl & subL[24], /* FL(kl5) */
tr = subR[26] ^ ROL1(dw); tr = subR[26] ^ rol32(dw, 1);
SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */ SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
SUBKEY_R(23) = subR[22] ^ tr; SUBKEY_R(23) = subR[22] ^ tr;
SUBKEY_L(24) = subL[24]; /* FL(kl5) */ SUBKEY_L(24) = subL[24]; /* FL(kl5) */
...@@ -553,7 +541,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -553,7 +541,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
SUBKEY_R(25) = subR[25]; SUBKEY_R(25) = subR[25];
tl = subL[23] ^ (subR[23] & ~subR[25]); tl = subL[23] ^ (subR[23] & ~subR[25]);
dw = tl & subL[25], /* FLinv(kl6) */ dw = tl & subL[25], /* FLinv(kl6) */
tr = subR[23] ^ ROL1(dw); tr = subR[23] ^ rol32(dw, 1);
SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */ SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
SUBKEY_R(26) = tr ^ subR[27]; SUBKEY_R(26) = tr ^ subR[27];
SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */ SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
...@@ -573,17 +561,17 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max) ...@@ -573,17 +561,17 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
/* apply the inverse of the last half of P-function */ /* apply the inverse of the last half of P-function */
i = 2; i = 2;
do { do {
dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = ROL8(dw);/* round 1 */ dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = rol32(dw, 8);/* round 1 */
SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw; SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = ROL8(dw);/* round 2 */ dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = rol32(dw, 8);/* round 2 */
SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw; SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = ROL8(dw);/* round 3 */ dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = rol32(dw, 8);/* round 3 */
SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw; SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = ROL8(dw);/* round 4 */ dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = rol32(dw, 8);/* round 4 */
SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw; SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = ROL8(dw);/* round 5 */ dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = rol32(dw, 8);/* round 5 */
SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw; SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = ROL8(dw);/* round 6 */ dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = rol32(dw, 8);/* round 6 */
SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw; SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
i += 8; i += 8;
} while (i < max); } while (i < max);
...@@ -599,10 +587,10 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey) ...@@ -599,10 +587,10 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey)
/** /**
* k == kll || klr || krl || krr (|| is concatenation) * k == kll || klr || krl || krr (|| is concatenation)
*/ */
GETU32(kll, key ); kll = get_unaligned_be32(key);
GETU32(klr, key + 4); klr = get_unaligned_be32(key + 4);
GETU32(krl, key + 8); krl = get_unaligned_be32(key + 8);
GETU32(krr, key + 12); krr = get_unaligned_be32(key + 12);
/* generate KL dependent subkeys */ /* generate KL dependent subkeys */
/* kw1 */ /* kw1 */
...@@ -707,14 +695,14 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey) ...@@ -707,14 +695,14 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
* key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
* (|| is concatenation) * (|| is concatenation)
*/ */
GETU32(kll, key ); kll = get_unaligned_be32(key);
GETU32(klr, key + 4); klr = get_unaligned_be32(key + 4);
GETU32(krl, key + 8); krl = get_unaligned_be32(key + 8);
GETU32(krr, key + 12); krr = get_unaligned_be32(key + 12);
GETU32(krll, key + 16); krll = get_unaligned_be32(key + 16);
GETU32(krlr, key + 20); krlr = get_unaligned_be32(key + 20);
GETU32(krrl, key + 24); krrl = get_unaligned_be32(key + 24);
GETU32(krrr, key + 28); krrr = get_unaligned_be32(key + 28);
/* generate KL dependent subkeys */ /* generate KL dependent subkeys */
/* kw1 */ /* kw1 */
...@@ -870,13 +858,13 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey) ...@@ -870,13 +858,13 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
t0 &= ll; \ t0 &= ll; \
t2 |= rr; \ t2 |= rr; \
rl ^= t2; \ rl ^= t2; \
lr ^= ROL1(t0); \ lr ^= rol32(t0, 1); \
t3 = krl; \ t3 = krl; \
t1 = klr; \ t1 = klr; \
t3 &= rl; \ t3 &= rl; \
t1 |= lr; \ t1 |= lr; \
ll ^= t1; \ ll ^= t1; \
rr ^= ROL1(t3); \ rr ^= rol32(t3, 1); \
} while(0) } while(0)
#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \ #define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \
...@@ -892,7 +880,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey) ...@@ -892,7 +880,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
il ^= kl; \ il ^= kl; \
ir ^= il ^ kr; \ ir ^= il ^ kr; \
yl ^= ir; \ yl ^= ir; \
yr ^= ROR8(il) ^ ir; \ yr ^= ror32(il, 8) ^ ir; \
} while(0) } while(0)
/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */ /* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
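
For clarity, a small self-check sketch (not part of the patch) showing that the generic helpers substituted above compute the same values as the removed private camellia macros; equivalence_check is a hypothetical function.

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/string.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

static void equivalence_check(u32 x, const u8 *p)
{
	u32 v;

	/* old ROL1(x) was ((x << 1) + (x >> 31)) */
	BUG_ON(rol32(x, 1) != ((x << 1) | (x >> 31)));
	/* old ROR8(x) was ((x >> 8) + (x << 24)) */
	BUG_ON(ror32(x, 8) != ((x >> 8) | (x << 24)));
	/* old GETU32(v, pt) was memcpy() followed by be32_to_cpu() */
	memcpy(&v, p, 4);
	BUG_ON(get_unaligned_be32(p) != be32_to_cpu(v));
}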
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
* *
*/ */
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h> #include <crypto/internal/skcipher.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
...@@ -38,15 +39,31 @@ static int null_compress(struct crypto_tfm *tfm, const u8 *src, ...@@ -38,15 +39,31 @@ static int null_compress(struct crypto_tfm *tfm, const u8 *src,
return 0; return 0;
} }
static void null_init(struct crypto_tfm *tfm) static int null_init(struct shash_desc *desc)
{ } {
return 0;
}
static void null_update(struct crypto_tfm *tfm, const u8 *data, static int null_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ } {
return 0;
}
static void null_final(struct crypto_tfm *tfm, u8 *out) static int null_final(struct shash_desc *desc, u8 *out)
{ } {
return 0;
}
static int null_digest(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
return 0;
}
static int null_hash_setkey(struct crypto_shash *tfm, const u8 *key,
unsigned int keylen)
{ return 0; }
static int null_setkey(struct crypto_tfm *tfm, const u8 *key, static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
...@@ -89,19 +106,20 @@ static struct crypto_alg compress_null = { ...@@ -89,19 +106,20 @@ static struct crypto_alg compress_null = {
.coa_decompress = null_compress } } .coa_decompress = null_compress } }
}; };
static struct crypto_alg digest_null = { static struct shash_alg digest_null = {
.cra_name = "digest_null", .digestsize = NULL_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .setkey = null_hash_setkey,
.cra_blocksize = NULL_BLOCK_SIZE, .init = null_init,
.cra_ctxsize = 0, .update = null_update,
.cra_module = THIS_MODULE, .finup = null_digest,
.cra_list = LIST_HEAD_INIT(digest_null.cra_list), .digest = null_digest,
.cra_u = { .digest = { .final = null_final,
.dia_digestsize = NULL_DIGEST_SIZE, .base = {
.dia_setkey = null_setkey, .cra_name = "digest_null",
.dia_init = null_init, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_update = null_update, .cra_blocksize = NULL_BLOCK_SIZE,
.dia_final = null_final } } .cra_module = THIS_MODULE,
}
}; };
static struct crypto_alg cipher_null = { static struct crypto_alg cipher_null = {
...@@ -154,7 +172,7 @@ static int __init crypto_null_mod_init(void) ...@@ -154,7 +172,7 @@ static int __init crypto_null_mod_init(void)
if (ret < 0) if (ret < 0)
goto out_unregister_cipher; goto out_unregister_cipher;
ret = crypto_register_alg(&digest_null); ret = crypto_register_shash(&digest_null);
if (ret < 0) if (ret < 0)
goto out_unregister_skcipher; goto out_unregister_skcipher;
...@@ -166,7 +184,7 @@ static int __init crypto_null_mod_init(void) ...@@ -166,7 +184,7 @@ static int __init crypto_null_mod_init(void)
return ret; return ret;
out_unregister_digest: out_unregister_digest:
crypto_unregister_alg(&digest_null); crypto_unregister_shash(&digest_null);
out_unregister_skcipher: out_unregister_skcipher:
crypto_unregister_alg(&skcipher_null); crypto_unregister_alg(&skcipher_null);
out_unregister_cipher: out_unregister_cipher:
...@@ -177,7 +195,7 @@ static int __init crypto_null_mod_init(void) ...@@ -177,7 +195,7 @@ static int __init crypto_null_mod_init(void)
static void __exit crypto_null_mod_fini(void) static void __exit crypto_null_mod_fini(void)
{ {
crypto_unregister_alg(&compress_null); crypto_unregister_alg(&compress_null);
crypto_unregister_alg(&digest_null); crypto_unregister_shash(&digest_null);
crypto_unregister_alg(&skcipher_null); crypto_unregister_alg(&skcipher_null);
crypto_unregister_alg(&cipher_null); crypto_unregister_alg(&cipher_null);
} }
......
...@@ -868,9 +868,10 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -868,9 +868,10 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
u32 *flags = &tfm->crt_flags; u32 *flags = &tfm->crt_flags;
if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) || if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
!((K[2] ^ K[4]) | (K[3] ^ K[5])))) !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
(*flags & CRYPTO_TFM_REQ_WEAK_KEY))
{ {
*flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; *flags |= CRYPTO_TFM_RES_WEAK_KEY;
return -EINVAL; return -EINVAL;
} }
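
A short hedged sketch of what the relaxed check means for users of des3_ede; reject_weak_3des_key is an illustrative name.

#include <linux/crypto.h>
#include <crypto/des.h>

static int reject_weak_3des_key(struct crypto_cipher *tfm, const u8 *key)
{
	/* opt in to weak-key rejection: without CRYPTO_TFM_REQ_WEAK_KEY a
	 * degenerate key (K1 == K2 or K2 == K3) is now accepted, where it
	 * was previously always refused (and with the wrong result flag) */
	crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
	return crypto_cipher_setkey(tfm, key, DES3_EDE_KEY_SIZE);
}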
......
...@@ -73,7 +73,7 @@ do { \ ...@@ -73,7 +73,7 @@ do { \
* /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h
*/ */
#undef Z #undef Z
#define Z(x) __constant_cpu_to_be32(x << 3) #define Z(x) cpu_to_be32(x << 3)
static const __be32 sbox0[256] = { static const __be32 sbox0[256] = {
Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11), Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11),
Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06), Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06),
...@@ -110,7 +110,7 @@ static const __be32 sbox0[256] = { ...@@ -110,7 +110,7 @@ static const __be32 sbox0[256] = {
}; };
#undef Z #undef Z
#define Z(x) __constant_cpu_to_be32((x << 27) | (x >> 5)) #define Z(x) cpu_to_be32((x << 27) | (x >> 5))
static const __be32 sbox1[256] = { static const __be32 sbox1[256] = {
Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e), Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e),
Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85), Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85),
...@@ -147,7 +147,7 @@ static const __be32 sbox1[256] = { ...@@ -147,7 +147,7 @@ static const __be32 sbox1[256] = {
}; };
#undef Z #undef Z
#define Z(x) __constant_cpu_to_be32(x << 11) #define Z(x) cpu_to_be32(x << 11)
static const __be32 sbox2[256] = { static const __be32 sbox2[256] = {
Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86), Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86),
Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d), Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d),
...@@ -184,7 +184,7 @@ static const __be32 sbox2[256] = { ...@@ -184,7 +184,7 @@ static const __be32 sbox2[256] = {
}; };
#undef Z #undef Z
#define Z(x) __constant_cpu_to_be32(x << 19) #define Z(x) cpu_to_be32(x << 19)
static const __be32 sbox3[256] = { static const __be32 sbox3[256] = {
Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2), Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2),
Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12), Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12),
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
* *
*/ */
#include <crypto/algapi.h> #include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h> #include <crypto/scatterwalk.h>
#include <linux/err.h> #include <linux/err.h>
#include <linux/init.h> #include <linux/init.h>
...@@ -238,9 +238,11 @@ static struct crypto_instance *hmac_alloc(struct rtattr **tb) ...@@ -238,9 +238,11 @@ static struct crypto_instance *hmac_alloc(struct rtattr **tb)
return ERR_CAST(alg); return ERR_CAST(alg);
inst = ERR_PTR(-EINVAL); inst = ERR_PTR(-EINVAL);
ds = (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == ds = alg->cra_type == &crypto_hash_type ?
CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize : alg->cra_hash.digestsize :
alg->cra_digest.dia_digestsize; alg->cra_type ?
__crypto_shash_alg(alg)->digestsize :
alg->cra_digest.dia_digestsize;
if (ds > alg->cra_blocksize) if (ds > alg->cra_blocksize)
goto out_put_alg; goto out_put_alg;
......
...@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err); ...@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err);
void crypto_shoot_alg(struct crypto_alg *alg); void crypto_shoot_alg(struct crypto_alg *alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask); u32 mask);
struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
const struct crypto_type *frontend);
int crypto_register_instance(struct crypto_template *tmpl, int crypto_register_instance(struct crypto_template *tmpl,
struct crypto_instance *inst); struct crypto_instance *inst);
......
...@@ -20,8 +20,8 @@ ...@@ -20,8 +20,8 @@
* (at your option) any later version. * (at your option) any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/crypto.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/string.h> #include <linux/string.h>
#include <linux/types.h> #include <linux/types.h>
...@@ -58,7 +58,7 @@ static inline u32 H(u32 x, u32 y, u32 z) ...@@ -58,7 +58,7 @@ static inline u32 H(u32 x, u32 y, u32 z)
{ {
return x ^ y ^ z; return x ^ y ^ z;
} }
#define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s)) #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s))
#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s))
#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
...@@ -148,24 +148,26 @@ static void md4_transform(u32 *hash, u32 const *in) ...@@ -148,24 +148,26 @@ static void md4_transform(u32 *hash, u32 const *in)
static inline void md4_transform_helper(struct md4_ctx *ctx) static inline void md4_transform_helper(struct md4_ctx *ctx)
{ {
le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32)); le32_to_cpu_array(ctx->block, ARRAY_SIZE(ctx->block));
md4_transform(ctx->hash, ctx->block); md4_transform(ctx->hash, ctx->block);
} }
static void md4_init(struct crypto_tfm *tfm) static int md4_init(struct shash_desc *desc)
{ {
struct md4_ctx *mctx = crypto_tfm_ctx(tfm); struct md4_ctx *mctx = shash_desc_ctx(desc);
mctx->hash[0] = 0x67452301; mctx->hash[0] = 0x67452301;
mctx->hash[1] = 0xefcdab89; mctx->hash[1] = 0xefcdab89;
mctx->hash[2] = 0x98badcfe; mctx->hash[2] = 0x98badcfe;
mctx->hash[3] = 0x10325476; mctx->hash[3] = 0x10325476;
mctx->byte_count = 0; mctx->byte_count = 0;
return 0;
} }
static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) static int md4_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{ {
struct md4_ctx *mctx = crypto_tfm_ctx(tfm); struct md4_ctx *mctx = shash_desc_ctx(desc);
const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
mctx->byte_count += len; mctx->byte_count += len;
...@@ -173,7 +175,7 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -173,7 +175,7 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
if (avail > len) { if (avail > len) {
memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
data, len); data, len);
return; return 0;
} }
memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
...@@ -191,11 +193,13 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -191,11 +193,13 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
} }
memcpy(mctx->block, data, len); memcpy(mctx->block, data, len);
return 0;
} }
static void md4_final(struct crypto_tfm *tfm, u8 *out) static int md4_final(struct shash_desc *desc, u8 *out)
{ {
struct md4_ctx *mctx = crypto_tfm_ctx(tfm); struct md4_ctx *mctx = shash_desc_ctx(desc);
const unsigned int offset = mctx->byte_count & 0x3f; const unsigned int offset = mctx->byte_count & 0x3f;
char *p = (char *)mctx->block + offset; char *p = (char *)mctx->block + offset;
int padding = 56 - (offset + 1); int padding = 56 - (offset + 1);
...@@ -214,33 +218,35 @@ static void md4_final(struct crypto_tfm *tfm, u8 *out) ...@@ -214,33 +218,35 @@ static void md4_final(struct crypto_tfm *tfm, u8 *out)
le32_to_cpu_array(mctx->block, (sizeof(mctx->block) - le32_to_cpu_array(mctx->block, (sizeof(mctx->block) -
sizeof(u64)) / sizeof(u32)); sizeof(u64)) / sizeof(u32));
md4_transform(mctx->hash, mctx->block); md4_transform(mctx->hash, mctx->block);
cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32)); cpu_to_le32_array(mctx->hash, ARRAY_SIZE(mctx->hash));
memcpy(out, mctx->hash, sizeof(mctx->hash)); memcpy(out, mctx->hash, sizeof(mctx->hash));
memset(mctx, 0, sizeof(*mctx)); memset(mctx, 0, sizeof(*mctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "md4", .digestsize = MD4_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = md4_init,
.cra_blocksize = MD4_HMAC_BLOCK_SIZE, .update = md4_update,
.cra_ctxsize = sizeof(struct md4_ctx), .final = md4_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct md4_ctx),
.cra_list = LIST_HEAD_INIT(alg.cra_list), .base = {
.cra_u = { .digest = { .cra_name = "md4",
.dia_digestsize = MD4_DIGEST_SIZE, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_init = md4_init, .cra_blocksize = MD4_HMAC_BLOCK_SIZE,
.dia_update = md4_update, .cra_module = THIS_MODULE,
.dia_final = md4_final } } }
}; };
static int __init md4_mod_init(void) static int __init md4_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit md4_mod_fini(void) static void __exit md4_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(md4_mod_init); module_init(md4_mod_init);
......
...@@ -15,10 +15,10 @@ ...@@ -15,10 +15,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/string.h> #include <linux/string.h>
#include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -147,20 +147,22 @@ static inline void md5_transform_helper(struct md5_ctx *ctx) ...@@ -147,20 +147,22 @@ static inline void md5_transform_helper(struct md5_ctx *ctx)
md5_transform(ctx->hash, ctx->block); md5_transform(ctx->hash, ctx->block);
} }
static void md5_init(struct crypto_tfm *tfm) static int md5_init(struct shash_desc *desc)
{ {
struct md5_ctx *mctx = crypto_tfm_ctx(tfm); struct md5_ctx *mctx = shash_desc_ctx(desc);
mctx->hash[0] = 0x67452301; mctx->hash[0] = 0x67452301;
mctx->hash[1] = 0xefcdab89; mctx->hash[1] = 0xefcdab89;
mctx->hash[2] = 0x98badcfe; mctx->hash[2] = 0x98badcfe;
mctx->hash[3] = 0x10325476; mctx->hash[3] = 0x10325476;
mctx->byte_count = 0; mctx->byte_count = 0;
return 0;
} }
static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{ {
struct md5_ctx *mctx = crypto_tfm_ctx(tfm); struct md5_ctx *mctx = shash_desc_ctx(desc);
const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
mctx->byte_count += len; mctx->byte_count += len;
...@@ -168,7 +170,7 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -168,7 +170,7 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
if (avail > len) { if (avail > len) {
memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
data, len); data, len);
return; return 0;
} }
memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
...@@ -186,11 +188,13 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -186,11 +188,13 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
} }
memcpy(mctx->block, data, len); memcpy(mctx->block, data, len);
return 0;
} }
static void md5_final(struct crypto_tfm *tfm, u8 *out) static int md5_final(struct shash_desc *desc, u8 *out)
{ {
struct md5_ctx *mctx = crypto_tfm_ctx(tfm); struct md5_ctx *mctx = shash_desc_ctx(desc);
const unsigned int offset = mctx->byte_count & 0x3f; const unsigned int offset = mctx->byte_count & 0x3f;
char *p = (char *)mctx->block + offset; char *p = (char *)mctx->block + offset;
int padding = 56 - (offset + 1); int padding = 56 - (offset + 1);
...@@ -212,30 +216,32 @@ static void md5_final(struct crypto_tfm *tfm, u8 *out) ...@@ -212,30 +216,32 @@ static void md5_final(struct crypto_tfm *tfm, u8 *out)
cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32)); cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
memcpy(out, mctx->hash, sizeof(mctx->hash)); memcpy(out, mctx->hash, sizeof(mctx->hash));
memset(mctx, 0, sizeof(*mctx)); memset(mctx, 0, sizeof(*mctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "md5", .digestsize = MD5_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = md5_init,
.cra_blocksize = MD5_HMAC_BLOCK_SIZE, .update = md5_update,
.cra_ctxsize = sizeof(struct md5_ctx), .final = md5_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct md5_ctx),
.cra_list = LIST_HEAD_INIT(alg.cra_list), .base = {
.cra_u = { .digest = { .cra_name = "md5",
.dia_digestsize = MD5_DIGEST_SIZE, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_init = md5_init, .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
.dia_update = md5_update, .cra_module = THIS_MODULE,
.dia_final = md5_final } } }
}; };
static int __init md5_mod_init(void) static int __init md5_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit md5_mod_fini(void) static void __exit md5_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(md5_mod_init); module_init(md5_mod_init);
......
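The md5 conversion above is representative of the whole series: per-message state moves into the shash descriptor and every hook now returns int. Below is a minimal sketch of how a caller drives the converted algorithm through the shash API; the helper name and the zero type/mask arguments are illustrative, not taken from the patch, and error handling is trimmed.

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical helper, not part of the patch: one-shot MD5 digest. */
static int example_md5_digest(const u8 *data, unsigned int len, u8 out[16])
{
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("md5", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                /* descriptor plus descsize bytes of per-request state on the stack */
                struct {
                        struct shash_desc shash;
                        char ctx[crypto_shash_descsize(tfm)];
                } sdesc;

                sdesc.shash.tfm = tfm;
                sdesc.shash.flags = 0;
                /* init + update + final in one call */
                err = crypto_shash_digest(&sdesc.shash, data, len, out);
        }

        crypto_free_shash(tfm);
        return err;
}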
...@@ -9,23 +9,25 @@ ...@@ -9,23 +9,25 @@
* it under the terms of the GNU General Public License version 2 as * it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation. * published by the Free Software Foundation.
*/ */
#include <crypto/internal/hash.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/string.h> #include <linux/string.h>
#include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
struct michael_mic_ctx { struct michael_mic_ctx {
u32 l, r;
};
struct michael_mic_desc_ctx {
u8 pending[4]; u8 pending[4];
size_t pending_len; size_t pending_len;
u32 l, r; u32 l, r;
}; };
static inline u32 xswap(u32 val) static inline u32 xswap(u32 val)
{ {
return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8); return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8);
...@@ -45,17 +47,22 @@ do { \ ...@@ -45,17 +47,22 @@ do { \
} while (0) } while (0)
static void michael_init(struct crypto_tfm *tfm) static int michael_init(struct shash_desc *desc)
{ {
struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
struct michael_mic_ctx *ctx = crypto_shash_ctx(desc->tfm);
mctx->pending_len = 0; mctx->pending_len = 0;
mctx->l = ctx->l;
mctx->r = ctx->r;
return 0;
} }
static void michael_update(struct crypto_tfm *tfm, const u8 *data, static int michael_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
const __le32 *src; const __le32 *src;
if (mctx->pending_len) { if (mctx->pending_len) {
...@@ -68,7 +75,7 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -68,7 +75,7 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
len -= flen; len -= flen;
if (mctx->pending_len < 4) if (mctx->pending_len < 4)
return; return 0;
src = (const __le32 *)mctx->pending; src = (const __le32 *)mctx->pending;
mctx->l ^= le32_to_cpup(src); mctx->l ^= le32_to_cpup(src);
...@@ -88,12 +95,14 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -88,12 +95,14 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
mctx->pending_len = len; mctx->pending_len = len;
memcpy(mctx->pending, src, len); memcpy(mctx->pending, src, len);
} }
return 0;
} }
static void michael_final(struct crypto_tfm *tfm, u8 *out) static int michael_final(struct shash_desc *desc, u8 *out)
{ {
struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
u8 *data = mctx->pending; u8 *data = mctx->pending;
__le32 *dst = (__le32 *)out; __le32 *dst = (__le32 *)out;
...@@ -119,17 +128,20 @@ static void michael_final(struct crypto_tfm *tfm, u8 *out) ...@@ -119,17 +128,20 @@ static void michael_final(struct crypto_tfm *tfm, u8 *out)
dst[0] = cpu_to_le32(mctx->l); dst[0] = cpu_to_le32(mctx->l);
dst[1] = cpu_to_le32(mctx->r); dst[1] = cpu_to_le32(mctx->r);
return 0;
} }
static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, static int michael_setkey(struct crypto_shash *tfm, const u8 *key,
unsigned int keylen) unsigned int keylen)
{ {
struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); struct michael_mic_ctx *mctx = crypto_shash_ctx(tfm);
const __le32 *data = (const __le32 *)key; const __le32 *data = (const __le32 *)key;
if (keylen != 8) { if (keylen != 8) {
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
return -EINVAL; return -EINVAL;
} }
...@@ -138,33 +150,31 @@ static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, ...@@ -138,33 +150,31 @@ static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
return 0; return 0;
} }
static struct shash_alg alg = {
static struct crypto_alg michael_mic_alg = { .digestsize = 8,
.cra_name = "michael_mic", .setkey = michael_setkey,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = michael_init,
.cra_blocksize = 8, .update = michael_update,
.cra_ctxsize = sizeof(struct michael_mic_ctx), .final = michael_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct michael_mic_desc_ctx),
.cra_alignmask = 3, .base = {
.cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list), .cra_name = "michael_mic",
.cra_u = { .digest = { .cra_blocksize = 8,
.dia_digestsize = 8, .cra_alignmask = 3,
.dia_init = michael_init, .cra_ctxsize = sizeof(struct michael_mic_ctx),
.dia_update = michael_update, .cra_module = THIS_MODULE,
.dia_final = michael_final, }
.dia_setkey = michael_setkey } }
}; };
static int __init michael_mic_init(void) static int __init michael_mic_init(void)
{ {
return crypto_register_alg(&michael_mic_alg); return crypto_register_shash(&alg);
} }
static void __exit michael_mic_exit(void) static void __exit michael_mic_exit(void)
{ {
crypto_unregister_alg(&michael_mic_alg); crypto_unregister_shash(&alg);
} }
......
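michael_mic shows why the conversion splits the context in two: the key words stay in the tfm-wide crypto_shash_ctx() and michael_init() copies them into the per-request shash_desc_ctx(), so one keyed transform can serve several concurrent computations. A hedged sketch of that usage follows; the helper name and the two-part message are invented for illustration.

#include <crypto/hash.h>

/* Hypothetical helper, not from the patch: keyed, incremental MIC. */
static int example_michael_mic(struct crypto_shash *tfm, const u8 key[8],
                               const u8 *hdr, unsigned int hlen,
                               const u8 *data, unsigned int dlen, u8 mic[8])
{
        struct {
                struct shash_desc shash;
                char ctx[crypto_shash_descsize(tfm)];
        } sdesc;
        int err;

        err = crypto_shash_setkey(tfm, key, 8);  /* fills michael_mic_ctx */
        if (err)
                return err;

        sdesc.shash.tfm = tfm;
        sdesc.shash.flags = 0;

        err = crypto_shash_init(&sdesc.shash);   /* copies l/r into the desc ctx */
        if (!err)
                err = crypto_shash_update(&sdesc.shash, hdr, hlen);
        if (!err)
                err = crypto_shash_update(&sdesc.shash, data, dlen);
        if (!err)
                err = crypto_shash_final(&sdesc.shash, mic);
        return err;
}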
...@@ -94,6 +94,17 @@ static int c_show(struct seq_file *m, void *p) ...@@ -94,6 +94,17 @@ static int c_show(struct seq_file *m, void *p)
seq_printf(m, "selftest : %s\n", seq_printf(m, "selftest : %s\n",
(alg->cra_flags & CRYPTO_ALG_TESTED) ? (alg->cra_flags & CRYPTO_ALG_TESTED) ?
"passed" : "unknown"); "passed" : "unknown");
if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
seq_printf(m, "type : larval\n");
seq_printf(m, "flags : 0x%x\n", alg->cra_flags);
goto out;
}
if (alg->cra_type && alg->cra_type->show) {
alg->cra_type->show(m, alg);
goto out;
}
switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) { switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
case CRYPTO_ALG_TYPE_CIPHER: case CRYPTO_ALG_TYPE_CIPHER:
...@@ -115,16 +126,11 @@ static int c_show(struct seq_file *m, void *p) ...@@ -115,16 +126,11 @@ static int c_show(struct seq_file *m, void *p)
seq_printf(m, "type : compression\n"); seq_printf(m, "type : compression\n");
break; break;
default: default:
if (alg->cra_flags & CRYPTO_ALG_LARVAL) { seq_printf(m, "type : unknown\n");
seq_printf(m, "type : larval\n");
seq_printf(m, "flags : 0x%x\n", alg->cra_flags);
} else if (alg->cra_type && alg->cra_type->show)
alg->cra_type->show(m, alg);
else
seq_printf(m, "type : unknown\n");
break; break;
} }
out:
seq_putc(m, '\n'); seq_putc(m, '\n');
return 0; return 0;
} }
......
...@@ -13,11 +13,10 @@ ...@@ -13,11 +13,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/types.h> #include <linux/types.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -218,9 +217,9 @@ static void rmd128_transform(u32 *state, const __le32 *in) ...@@ -218,9 +217,9 @@ static void rmd128_transform(u32 *state, const __le32 *in)
return; return;
} }
static void rmd128_init(struct crypto_tfm *tfm) static int rmd128_init(struct shash_desc *desc)
{ {
struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd128_ctx *rctx = shash_desc_ctx(desc);
rctx->byte_count = 0; rctx->byte_count = 0;
...@@ -230,12 +229,14 @@ static void rmd128_init(struct crypto_tfm *tfm) ...@@ -230,12 +229,14 @@ static void rmd128_init(struct crypto_tfm *tfm)
rctx->state[3] = RMD_H3; rctx->state[3] = RMD_H3;
memset(rctx->buffer, 0, sizeof(rctx->buffer)); memset(rctx->buffer, 0, sizeof(rctx->buffer));
return 0;
} }
static void rmd128_update(struct crypto_tfm *tfm, const u8 *data, static int rmd128_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd128_ctx *rctx = shash_desc_ctx(desc);
const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx->byte_count += len; rctx->byte_count += len;
...@@ -244,7 +245,7 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -244,7 +245,7 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
if (avail > len) { if (avail > len) {
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
data, len); data, len);
return; goto out;
} }
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
...@@ -262,12 +263,15 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -262,12 +263,15 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
} }
memcpy(rctx->buffer, data, len); memcpy(rctx->buffer, data, len);
out:
return 0;
} }
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static void rmd128_final(struct crypto_tfm *tfm, u8 *out) static int rmd128_final(struct shash_desc *desc, u8 *out)
{ {
struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd128_ctx *rctx = shash_desc_ctx(desc);
u32 i, index, padlen; u32 i, index, padlen;
__le64 bits; __le64 bits;
__le32 *dst = (__le32 *)out; __le32 *dst = (__le32 *)out;
...@@ -278,10 +282,10 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out) ...@@ -278,10 +282,10 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64 */ /* Pad out to 56 mod 64 */
index = rctx->byte_count & 0x3f; index = rctx->byte_count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index); padlen = (index < 56) ? (56 - index) : ((64+56) - index);
rmd128_update(tfm, padding, padlen); rmd128_update(desc, padding, padlen);
/* Append length */ /* Append length */
rmd128_update(tfm, (const u8 *)&bits, sizeof(bits)); rmd128_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 4; i++) for (i = 0; i < 4; i++)
...@@ -289,31 +293,32 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out) ...@@ -289,31 +293,32 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
/* Wipe context */ /* Wipe context */
memset(rctx, 0, sizeof(*rctx)); memset(rctx, 0, sizeof(*rctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "rmd128", .digestsize = RMD128_DIGEST_SIZE,
.cra_driver_name = "rmd128", .init = rmd128_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = rmd128_update,
.cra_blocksize = RMD128_BLOCK_SIZE, .final = rmd128_final,
.cra_ctxsize = sizeof(struct rmd128_ctx), .descsize = sizeof(struct rmd128_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_name = "rmd128",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = RMD128_DIGEST_SIZE, .cra_blocksize = RMD128_BLOCK_SIZE,
.dia_init = rmd128_init, .cra_module = THIS_MODULE,
.dia_update = rmd128_update, }
.dia_final = rmd128_final } }
}; };
static int __init rmd128_mod_init(void) static int __init rmd128_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit rmd128_mod_fini(void) static void __exit rmd128_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(rmd128_mod_init); module_init(rmd128_mod_init);
...@@ -321,5 +326,3 @@ module_exit(rmd128_mod_fini); ...@@ -321,5 +326,3 @@ module_exit(rmd128_mod_fini);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("RIPEMD-128 Message Digest"); MODULE_DESCRIPTION("RIPEMD-128 Message Digest");
MODULE_ALIAS("rmd128");
...@@ -13,11 +13,10 @@ ...@@ -13,11 +13,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/types.h> #include <linux/types.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -261,9 +260,9 @@ static void rmd160_transform(u32 *state, const __le32 *in) ...@@ -261,9 +260,9 @@ static void rmd160_transform(u32 *state, const __le32 *in)
return; return;
} }
static void rmd160_init(struct crypto_tfm *tfm) static int rmd160_init(struct shash_desc *desc)
{ {
struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd160_ctx *rctx = shash_desc_ctx(desc);
rctx->byte_count = 0; rctx->byte_count = 0;
...@@ -274,12 +273,14 @@ static void rmd160_init(struct crypto_tfm *tfm) ...@@ -274,12 +273,14 @@ static void rmd160_init(struct crypto_tfm *tfm)
rctx->state[4] = RMD_H4; rctx->state[4] = RMD_H4;
memset(rctx->buffer, 0, sizeof(rctx->buffer)); memset(rctx->buffer, 0, sizeof(rctx->buffer));
return 0;
} }
static void rmd160_update(struct crypto_tfm *tfm, const u8 *data, static int rmd160_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd160_ctx *rctx = shash_desc_ctx(desc);
const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx->byte_count += len; rctx->byte_count += len;
...@@ -288,7 +289,7 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -288,7 +289,7 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
if (avail > len) { if (avail > len) {
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
data, len); data, len);
return; goto out;
} }
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
...@@ -306,12 +307,15 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -306,12 +307,15 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
} }
memcpy(rctx->buffer, data, len); memcpy(rctx->buffer, data, len);
out:
return 0;
} }
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static void rmd160_final(struct crypto_tfm *tfm, u8 *out) static int rmd160_final(struct shash_desc *desc, u8 *out)
{ {
struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd160_ctx *rctx = shash_desc_ctx(desc);
u32 i, index, padlen; u32 i, index, padlen;
__le64 bits; __le64 bits;
__le32 *dst = (__le32 *)out; __le32 *dst = (__le32 *)out;
...@@ -322,10 +326,10 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out) ...@@ -322,10 +326,10 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64 */ /* Pad out to 56 mod 64 */
index = rctx->byte_count & 0x3f; index = rctx->byte_count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index); padlen = (index < 56) ? (56 - index) : ((64+56) - index);
rmd160_update(tfm, padding, padlen); rmd160_update(desc, padding, padlen);
/* Append length */ /* Append length */
rmd160_update(tfm, (const u8 *)&bits, sizeof(bits)); rmd160_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 5; i++) for (i = 0; i < 5; i++)
...@@ -333,31 +337,32 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out) ...@@ -333,31 +337,32 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
/* Wipe context */ /* Wipe context */
memset(rctx, 0, sizeof(*rctx)); memset(rctx, 0, sizeof(*rctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "rmd160", .digestsize = RMD160_DIGEST_SIZE,
.cra_driver_name = "rmd160", .init = rmd160_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = rmd160_update,
.cra_blocksize = RMD160_BLOCK_SIZE, .final = rmd160_final,
.cra_ctxsize = sizeof(struct rmd160_ctx), .descsize = sizeof(struct rmd160_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_name = "rmd160",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = RMD160_DIGEST_SIZE, .cra_blocksize = RMD160_BLOCK_SIZE,
.dia_init = rmd160_init, .cra_module = THIS_MODULE,
.dia_update = rmd160_update, }
.dia_final = rmd160_final } }
}; };
static int __init rmd160_mod_init(void) static int __init rmd160_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit rmd160_mod_fini(void) static void __exit rmd160_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(rmd160_mod_init); module_init(rmd160_mod_init);
...@@ -365,5 +370,3 @@ module_exit(rmd160_mod_fini); ...@@ -365,5 +370,3 @@ module_exit(rmd160_mod_fini);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("RIPEMD-160 Message Digest"); MODULE_DESCRIPTION("RIPEMD-160 Message Digest");
MODULE_ALIAS("rmd160");
...@@ -13,11 +13,10 @@ ...@@ -13,11 +13,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/types.h> #include <linux/types.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -233,9 +232,9 @@ static void rmd256_transform(u32 *state, const __le32 *in) ...@@ -233,9 +232,9 @@ static void rmd256_transform(u32 *state, const __le32 *in)
return; return;
} }
static void rmd256_init(struct crypto_tfm *tfm) static int rmd256_init(struct shash_desc *desc)
{ {
struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd256_ctx *rctx = shash_desc_ctx(desc);
rctx->byte_count = 0; rctx->byte_count = 0;
...@@ -249,12 +248,14 @@ static void rmd256_init(struct crypto_tfm *tfm) ...@@ -249,12 +248,14 @@ static void rmd256_init(struct crypto_tfm *tfm)
rctx->state[7] = RMD_H8; rctx->state[7] = RMD_H8;
memset(rctx->buffer, 0, sizeof(rctx->buffer)); memset(rctx->buffer, 0, sizeof(rctx->buffer));
return 0;
} }
static void rmd256_update(struct crypto_tfm *tfm, const u8 *data, static int rmd256_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd256_ctx *rctx = shash_desc_ctx(desc);
const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx->byte_count += len; rctx->byte_count += len;
...@@ -263,7 +264,7 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -263,7 +264,7 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
if (avail > len) { if (avail > len) {
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
data, len); data, len);
return; goto out;
} }
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
...@@ -281,12 +282,15 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -281,12 +282,15 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
} }
memcpy(rctx->buffer, data, len); memcpy(rctx->buffer, data, len);
out:
return 0;
} }
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static void rmd256_final(struct crypto_tfm *tfm, u8 *out) static int rmd256_final(struct shash_desc *desc, u8 *out)
{ {
struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd256_ctx *rctx = shash_desc_ctx(desc);
u32 i, index, padlen; u32 i, index, padlen;
__le64 bits; __le64 bits;
__le32 *dst = (__le32 *)out; __le32 *dst = (__le32 *)out;
...@@ -297,10 +301,10 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out) ...@@ -297,10 +301,10 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64 */ /* Pad out to 56 mod 64 */
index = rctx->byte_count & 0x3f; index = rctx->byte_count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index); padlen = (index < 56) ? (56 - index) : ((64+56) - index);
rmd256_update(tfm, padding, padlen); rmd256_update(desc, padding, padlen);
/* Append length */ /* Append length */
rmd256_update(tfm, (const u8 *)&bits, sizeof(bits)); rmd256_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 8; i++) for (i = 0; i < 8; i++)
...@@ -308,31 +312,32 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out) ...@@ -308,31 +312,32 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
/* Wipe context */ /* Wipe context */
memset(rctx, 0, sizeof(*rctx)); memset(rctx, 0, sizeof(*rctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "rmd256", .digestsize = RMD256_DIGEST_SIZE,
.cra_driver_name = "rmd256", .init = rmd256_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = rmd256_update,
.cra_blocksize = RMD256_BLOCK_SIZE, .final = rmd256_final,
.cra_ctxsize = sizeof(struct rmd256_ctx), .descsize = sizeof(struct rmd256_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_name = "rmd256",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = RMD256_DIGEST_SIZE, .cra_blocksize = RMD256_BLOCK_SIZE,
.dia_init = rmd256_init, .cra_module = THIS_MODULE,
.dia_update = rmd256_update, }
.dia_final = rmd256_final } }
}; };
static int __init rmd256_mod_init(void) static int __init rmd256_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit rmd256_mod_fini(void) static void __exit rmd256_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(rmd256_mod_init); module_init(rmd256_mod_init);
...@@ -340,5 +345,3 @@ module_exit(rmd256_mod_fini); ...@@ -340,5 +345,3 @@ module_exit(rmd256_mod_fini);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("RIPEMD-256 Message Digest"); MODULE_DESCRIPTION("RIPEMD-256 Message Digest");
MODULE_ALIAS("rmd256");
...@@ -13,11 +13,10 @@ ...@@ -13,11 +13,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/types.h> #include <linux/types.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -280,9 +279,9 @@ static void rmd320_transform(u32 *state, const __le32 *in) ...@@ -280,9 +279,9 @@ static void rmd320_transform(u32 *state, const __le32 *in)
return; return;
} }
static void rmd320_init(struct crypto_tfm *tfm) static int rmd320_init(struct shash_desc *desc)
{ {
struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd320_ctx *rctx = shash_desc_ctx(desc);
rctx->byte_count = 0; rctx->byte_count = 0;
...@@ -298,12 +297,14 @@ static void rmd320_init(struct crypto_tfm *tfm) ...@@ -298,12 +297,14 @@ static void rmd320_init(struct crypto_tfm *tfm)
rctx->state[9] = RMD_H9; rctx->state[9] = RMD_H9;
memset(rctx->buffer, 0, sizeof(rctx->buffer)); memset(rctx->buffer, 0, sizeof(rctx->buffer));
return 0;
} }
static void rmd320_update(struct crypto_tfm *tfm, const u8 *data, static int rmd320_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd320_ctx *rctx = shash_desc_ctx(desc);
const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx->byte_count += len; rctx->byte_count += len;
...@@ -312,7 +313,7 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -312,7 +313,7 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
if (avail > len) { if (avail > len) {
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
data, len); data, len);
return; goto out;
} }
memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
...@@ -330,12 +331,15 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -330,12 +331,15 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
} }
memcpy(rctx->buffer, data, len); memcpy(rctx->buffer, data, len);
out:
return 0;
} }
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static void rmd320_final(struct crypto_tfm *tfm, u8 *out) static int rmd320_final(struct shash_desc *desc, u8 *out)
{ {
struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); struct rmd320_ctx *rctx = shash_desc_ctx(desc);
u32 i, index, padlen; u32 i, index, padlen;
__le64 bits; __le64 bits;
__le32 *dst = (__le32 *)out; __le32 *dst = (__le32 *)out;
...@@ -346,10 +350,10 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out) ...@@ -346,10 +350,10 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64 */ /* Pad out to 56 mod 64 */
index = rctx->byte_count & 0x3f; index = rctx->byte_count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index); padlen = (index < 56) ? (56 - index) : ((64+56) - index);
rmd320_update(tfm, padding, padlen); rmd320_update(desc, padding, padlen);
/* Append length */ /* Append length */
rmd320_update(tfm, (const u8 *)&bits, sizeof(bits)); rmd320_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 10; i++) for (i = 0; i < 10; i++)
...@@ -357,31 +361,32 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out) ...@@ -357,31 +361,32 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
/* Wipe context */ /* Wipe context */
memset(rctx, 0, sizeof(*rctx)); memset(rctx, 0, sizeof(*rctx));
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "rmd320", .digestsize = RMD320_DIGEST_SIZE,
.cra_driver_name = "rmd320", .init = rmd320_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = rmd320_update,
.cra_blocksize = RMD320_BLOCK_SIZE, .final = rmd320_final,
.cra_ctxsize = sizeof(struct rmd320_ctx), .descsize = sizeof(struct rmd320_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_name = "rmd320",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = RMD320_DIGEST_SIZE, .cra_blocksize = RMD320_BLOCK_SIZE,
.dia_init = rmd320_init, .cra_module = THIS_MODULE,
.dia_update = rmd320_update, }
.dia_final = rmd320_final } }
}; };
static int __init rmd320_mod_init(void) static int __init rmd320_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit rmd320_mod_fini(void) static void __exit rmd320_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(rmd320_mod_init); module_init(rmd320_mod_init);
...@@ -389,5 +394,3 @@ module_exit(rmd320_mod_fini); ...@@ -389,5 +394,3 @@ module_exit(rmd320_mod_fini);
MODULE_LICENSE("GPL"); MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("RIPEMD-320 Message Digest"); MODULE_DESCRIPTION("RIPEMD-320 Message Digest");
MODULE_ALIAS("rmd320");
...@@ -24,6 +24,7 @@ ...@@ -24,6 +24,7 @@
#include <linux/errno.h> #include <linux/errno.h>
#include <linux/crypto.h> #include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
#include <linux/bitops.h>
#include <crypto/algapi.h> #include <crypto/algapi.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -42,10 +43,6 @@ D. J. Bernstein ...@@ -42,10 +43,6 @@ D. J. Bernstein
Public domain. Public domain.
*/ */
#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n))))
#define XOR(v,w) ((v) ^ (w))
#define PLUS(v,w) (((v) + (w)))
#define PLUSONE(v) (PLUS((v),1))
#define U32TO8_LITTLE(p, v) \ #define U32TO8_LITTLE(p, v) \
{ (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \ { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \
(p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; } (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; }
...@@ -65,41 +62,41 @@ static void salsa20_wordtobyte(u8 output[64], const u32 input[16]) ...@@ -65,41 +62,41 @@ static void salsa20_wordtobyte(u8 output[64], const u32 input[16])
memcpy(x, input, sizeof(x)); memcpy(x, input, sizeof(x));
for (i = 20; i > 0; i -= 2) { for (i = 20; i > 0; i -= 2) {
x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7)); x[ 4] ^= rol32((x[ 0] + x[12]), 7);
x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9)); x[ 8] ^= rol32((x[ 4] + x[ 0]), 9);
x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13)); x[12] ^= rol32((x[ 8] + x[ 4]), 13);
x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18)); x[ 0] ^= rol32((x[12] + x[ 8]), 18);
x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7)); x[ 9] ^= rol32((x[ 5] + x[ 1]), 7);
x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9)); x[13] ^= rol32((x[ 9] + x[ 5]), 9);
x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13)); x[ 1] ^= rol32((x[13] + x[ 9]), 13);
x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18)); x[ 5] ^= rol32((x[ 1] + x[13]), 18);
x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7)); x[14] ^= rol32((x[10] + x[ 6]), 7);
x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9)); x[ 2] ^= rol32((x[14] + x[10]), 9);
x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13)); x[ 6] ^= rol32((x[ 2] + x[14]), 13);
x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18)); x[10] ^= rol32((x[ 6] + x[ 2]), 18);
x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7)); x[ 3] ^= rol32((x[15] + x[11]), 7);
x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9)); x[ 7] ^= rol32((x[ 3] + x[15]), 9);
x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13)); x[11] ^= rol32((x[ 7] + x[ 3]), 13);
x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18)); x[15] ^= rol32((x[11] + x[ 7]), 18);
x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7)); x[ 1] ^= rol32((x[ 0] + x[ 3]), 7);
x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9)); x[ 2] ^= rol32((x[ 1] + x[ 0]), 9);
x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13)); x[ 3] ^= rol32((x[ 2] + x[ 1]), 13);
x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18)); x[ 0] ^= rol32((x[ 3] + x[ 2]), 18);
x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7)); x[ 6] ^= rol32((x[ 5] + x[ 4]), 7);
x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9)); x[ 7] ^= rol32((x[ 6] + x[ 5]), 9);
x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13)); x[ 4] ^= rol32((x[ 7] + x[ 6]), 13);
x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18)); x[ 5] ^= rol32((x[ 4] + x[ 7]), 18);
x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7)); x[11] ^= rol32((x[10] + x[ 9]), 7);
x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9)); x[ 8] ^= rol32((x[11] + x[10]), 9);
x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13)); x[ 9] ^= rol32((x[ 8] + x[11]), 13);
x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18)); x[10] ^= rol32((x[ 9] + x[ 8]), 18);
x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7)); x[12] ^= rol32((x[15] + x[14]), 7);
x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9)); x[13] ^= rol32((x[12] + x[15]), 9);
x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13)); x[14] ^= rol32((x[13] + x[12]), 13);
x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18)); x[15] ^= rol32((x[14] + x[13]), 18);
} }
for (i = 0; i < 16; ++i) for (i = 0; i < 16; ++i)
x[i] = PLUS(x[i],input[i]); x[i] += input[i];
for (i = 0; i < 16; ++i) for (i = 0; i < 16; ++i)
U32TO8_LITTLE(output + 4 * i,x[i]); U32TO8_LITTLE(output + 4 * i,x[i]);
} }
...@@ -150,9 +147,9 @@ static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst, ...@@ -150,9 +147,9 @@ static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst,
while (bytes) { while (bytes) {
salsa20_wordtobyte(buf, ctx->input); salsa20_wordtobyte(buf, ctx->input);
ctx->input[8] = PLUSONE(ctx->input[8]); ctx->input[8]++;
if (!ctx->input[8]) if (!ctx->input[8])
ctx->input[9] = PLUSONE(ctx->input[9]); ctx->input[9]++;
if (bytes <= 64) { if (bytes <= 64) {
crypto_xor(dst, buf, bytes); crypto_xor(dst, buf, bytes);
......
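The salsa20 cleanup above drops the local XOR/ROTATE/PLUS/PLUSONE macros in favour of plain C operators and the kernel's rol32() from <linux/bitops.h>. The sketch below, not taken from the patch, shows what that buys: rol32() is an ordinary 32-bit left rotation, so each quarter-round line now reads directly as the Salsa20 specification writes it.

#include <linux/types.h>

/* What rol32() computes for the rotation counts used here (7, 9, 13, 18). */
static inline u32 example_rol32(u32 v, unsigned int n)
{
        return (v << n) | (v >> (32 - n));
}

/* One Salsa20 quarter-round, e.g. (a, b, c, d) = (x[0], x[4], x[8], x[12]). */
static void example_quarterround(u32 *a, u32 *b, u32 *c, u32 *d)
{
        *b ^= example_rol32(*a + *d, 7);
        *c ^= example_rol32(*b + *a, 9);
        *d ^= example_rol32(*c + *b, 13);
        *a ^= example_rol32(*d + *c, 18);
}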
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h> #include <linux/cryptohash.h>
#include <linux/types.h> #include <linux/types.h>
#include <crypto/sha.h> #include <crypto/sha.h>
...@@ -31,9 +31,10 @@ struct sha1_ctx { ...@@ -31,9 +31,10 @@ struct sha1_ctx {
u8 buffer[64]; u8 buffer[64];
}; };
static void sha1_init(struct crypto_tfm *tfm) static int sha1_init(struct shash_desc *desc)
{ {
struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); struct sha1_ctx *sctx = shash_desc_ctx(desc);
static const struct sha1_ctx initstate = { static const struct sha1_ctx initstate = {
0, 0,
{ SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
...@@ -41,12 +42,14 @@ static void sha1_init(struct crypto_tfm *tfm) ...@@ -41,12 +42,14 @@ static void sha1_init(struct crypto_tfm *tfm)
}; };
*sctx = initstate; *sctx = initstate;
return 0;
} }
static void sha1_update(struct crypto_tfm *tfm, const u8 *data, static int sha1_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); struct sha1_ctx *sctx = shash_desc_ctx(desc);
unsigned int partial, done; unsigned int partial, done;
const u8 *src; const u8 *src;
...@@ -74,13 +77,15 @@ static void sha1_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -74,13 +77,15 @@ static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
partial = 0; partial = 0;
} }
memcpy(sctx->buffer + partial, src, len - done); memcpy(sctx->buffer + partial, src, len - done);
return 0;
} }
/* Add padding and return the message digest. */ /* Add padding and return the message digest. */
static void sha1_final(struct crypto_tfm *tfm, u8 *out) static int sha1_final(struct shash_desc *desc, u8 *out)
{ {
struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); struct sha1_ctx *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
u32 i, index, padlen; u32 i, index, padlen;
__be64 bits; __be64 bits;
...@@ -91,10 +96,10 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out) ...@@ -91,10 +96,10 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64 */ /* Pad out to 56 mod 64 */
index = sctx->count & 0x3f; index = sctx->count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index); padlen = (index < 56) ? (56 - index) : ((64+56) - index);
sha1_update(tfm, padding, padlen); sha1_update(desc, padding, padlen);
/* Append length */ /* Append length */
sha1_update(tfm, (const u8 *)&bits, sizeof(bits)); sha1_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 5; i++) for (i = 0; i < 5; i++)
...@@ -102,32 +107,33 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out) ...@@ -102,32 +107,33 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
/* Wipe context */ /* Wipe context */
memset(sctx, 0, sizeof *sctx); memset(sctx, 0, sizeof *sctx);
return 0;
} }
static struct crypto_alg alg = { static struct shash_alg alg = {
.cra_name = "sha1", .digestsize = SHA1_DIGEST_SIZE,
.cra_driver_name= "sha1-generic", .init = sha1_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = sha1_update,
.cra_blocksize = SHA1_BLOCK_SIZE, .final = sha1_final,
.cra_ctxsize = sizeof(struct sha1_ctx), .descsize = sizeof(struct sha1_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_alignmask = 3, .cra_name = "sha1",
.cra_list = LIST_HEAD_INIT(alg.cra_list), .cra_driver_name= "sha1-generic",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = SHA1_DIGEST_SIZE, .cra_blocksize = SHA1_BLOCK_SIZE,
.dia_init = sha1_init, .cra_module = THIS_MODULE,
.dia_update = sha1_update, }
.dia_final = sha1_final } }
}; };
static int __init sha1_generic_mod_init(void) static int __init sha1_generic_mod_init(void)
{ {
return crypto_register_alg(&alg); return crypto_register_shash(&alg);
} }
static void __exit sha1_generic_mod_fini(void) static void __exit sha1_generic_mod_fini(void)
{ {
crypto_unregister_alg(&alg); crypto_unregister_shash(&alg);
} }
module_init(sha1_generic_mod_init); module_init(sha1_generic_mod_init);
......
...@@ -17,10 +17,10 @@ ...@@ -17,10 +17,10 @@
* any later version. * any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
#include <crypto/sha.h> #include <crypto/sha.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
...@@ -69,7 +69,7 @@ static void sha256_transform(u32 *state, const u8 *input) ...@@ -69,7 +69,7 @@ static void sha256_transform(u32 *state, const u8 *input)
/* now blend */ /* now blend */
for (i = 16; i < 64; i++) for (i = 16; i < 64; i++)
BLEND_OP(i, W); BLEND_OP(i, W);
/* load the state into our registers */ /* load the state into our registers */
a=state[0]; b=state[1]; c=state[2]; d=state[3]; a=state[0]; b=state[1]; c=state[2]; d=state[3];
e=state[4]; f=state[5]; g=state[6]; h=state[7]; e=state[4]; f=state[5]; g=state[6]; h=state[7];
...@@ -220,9 +220,9 @@ static void sha256_transform(u32 *state, const u8 *input) ...@@ -220,9 +220,9 @@ static void sha256_transform(u32 *state, const u8 *input)
} }
static void sha224_init(struct crypto_tfm *tfm) static int sha224_init(struct shash_desc *desc)
{ {
struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); struct sha256_ctx *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA224_H0; sctx->state[0] = SHA224_H0;
sctx->state[1] = SHA224_H1; sctx->state[1] = SHA224_H1;
sctx->state[2] = SHA224_H2; sctx->state[2] = SHA224_H2;
...@@ -233,11 +233,13 @@ static void sha224_init(struct crypto_tfm *tfm) ...@@ -233,11 +233,13 @@ static void sha224_init(struct crypto_tfm *tfm)
sctx->state[7] = SHA224_H7; sctx->state[7] = SHA224_H7;
sctx->count[0] = 0; sctx->count[0] = 0;
sctx->count[1] = 0; sctx->count[1] = 0;
return 0;
} }
static void sha256_init(struct crypto_tfm *tfm) static int sha256_init(struct shash_desc *desc)
{ {
struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); struct sha256_ctx *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA256_H0; sctx->state[0] = SHA256_H0;
sctx->state[1] = SHA256_H1; sctx->state[1] = SHA256_H1;
sctx->state[2] = SHA256_H2; sctx->state[2] = SHA256_H2;
...@@ -247,12 +249,14 @@ static void sha256_init(struct crypto_tfm *tfm) ...@@ -247,12 +249,14 @@ static void sha256_init(struct crypto_tfm *tfm)
sctx->state[6] = SHA256_H6; sctx->state[6] = SHA256_H6;
sctx->state[7] = SHA256_H7; sctx->state[7] = SHA256_H7;
sctx->count[0] = sctx->count[1] = 0; sctx->count[0] = sctx->count[1] = 0;
return 0;
} }
static void sha256_update(struct crypto_tfm *tfm, const u8 *data, static int sha256_update(struct shash_desc *desc, const u8 *data,
unsigned int len) unsigned int len)
{ {
struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); struct sha256_ctx *sctx = shash_desc_ctx(desc);
unsigned int i, index, part_len; unsigned int i, index, part_len;
/* Compute number of bytes mod 128 */ /* Compute number of bytes mod 128 */
...@@ -277,14 +281,16 @@ static void sha256_update(struct crypto_tfm *tfm, const u8 *data, ...@@ -277,14 +281,16 @@ static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
} else { } else {
i = 0; i = 0;
} }
/* Buffer remaining input */ /* Buffer remaining input */
memcpy(&sctx->buf[index], &data[i], len-i); memcpy(&sctx->buf[index], &data[i], len-i);
return 0;
} }
static void sha256_final(struct crypto_tfm *tfm, u8 *out) static int sha256_final(struct shash_desc *desc, u8 *out)
{ {
struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); struct sha256_ctx *sctx = shash_desc_ctx(desc);
__be32 *dst = (__be32 *)out; __be32 *dst = (__be32 *)out;
__be32 bits[2]; __be32 bits[2];
unsigned int index, pad_len; unsigned int index, pad_len;
...@@ -298,10 +304,10 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out) ...@@ -298,10 +304,10 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
/* Pad out to 56 mod 64. */ /* Pad out to 56 mod 64. */
index = (sctx->count[0] >> 3) & 0x3f; index = (sctx->count[0] >> 3) & 0x3f;
pad_len = (index < 56) ? (56 - index) : ((64+56) - index); pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
sha256_update(tfm, padding, pad_len); sha256_update(desc, padding, pad_len);
/* Append length (before padding) */ /* Append length (before padding) */
sha256_update(tfm, (const u8 *)bits, sizeof(bits)); sha256_update(desc, (const u8 *)bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 8; i++) for (i = 0; i < 8; i++)
...@@ -309,71 +315,73 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out) ...@@ -309,71 +315,73 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
/* Zeroize sensitive information. */ /* Zeroize sensitive information. */
memset(sctx, 0, sizeof(*sctx)); memset(sctx, 0, sizeof(*sctx));
return 0;
} }
static void sha224_final(struct crypto_tfm *tfm, u8 *hash) static int sha224_final(struct shash_desc *desc, u8 *hash)
{ {
u8 D[SHA256_DIGEST_SIZE]; u8 D[SHA256_DIGEST_SIZE];
sha256_final(tfm, D); sha256_final(desc, D);
memcpy(hash, D, SHA224_DIGEST_SIZE); memcpy(hash, D, SHA224_DIGEST_SIZE);
memset(D, 0, SHA256_DIGEST_SIZE); memset(D, 0, SHA256_DIGEST_SIZE);
return 0;
} }
static struct crypto_alg sha256 = { static struct shash_alg sha256 = {
.cra_name = "sha256", .digestsize = SHA256_DIGEST_SIZE,
.cra_driver_name= "sha256-generic", .init = sha256_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = sha256_update,
.cra_blocksize = SHA256_BLOCK_SIZE, .final = sha256_final,
.cra_ctxsize = sizeof(struct sha256_ctx), .descsize = sizeof(struct sha256_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_alignmask = 3, .cra_name = "sha256",
.cra_list = LIST_HEAD_INIT(sha256.cra_list), .cra_driver_name= "sha256-generic",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = SHA256_DIGEST_SIZE, .cra_blocksize = SHA256_BLOCK_SIZE,
.dia_init = sha256_init, .cra_module = THIS_MODULE,
.dia_update = sha256_update, }
.dia_final = sha256_final } }
}; };
static struct crypto_alg sha224 = { static struct shash_alg sha224 = {
.cra_name = "sha224", .digestsize = SHA224_DIGEST_SIZE,
.cra_driver_name = "sha224-generic", .init = sha224_init,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .update = sha256_update,
.cra_blocksize = SHA224_BLOCK_SIZE, .final = sha224_final,
.cra_ctxsize = sizeof(struct sha256_ctx), .descsize = sizeof(struct sha256_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_alignmask = 3, .cra_name = "sha224",
.cra_list = LIST_HEAD_INIT(sha224.cra_list), .cra_driver_name= "sha224-generic",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = SHA224_DIGEST_SIZE, .cra_blocksize = SHA224_BLOCK_SIZE,
.dia_init = sha224_init, .cra_module = THIS_MODULE,
.dia_update = sha256_update, }
.dia_final = sha224_final } }
}; };
static int __init sha256_generic_mod_init(void) static int __init sha256_generic_mod_init(void)
{ {
int ret = 0; int ret = 0;
ret = crypto_register_alg(&sha224); ret = crypto_register_shash(&sha224);
if (ret < 0) if (ret < 0)
return ret; return ret;
ret = crypto_register_alg(&sha256); ret = crypto_register_shash(&sha256);
if (ret < 0) if (ret < 0)
crypto_unregister_alg(&sha224); crypto_unregister_shash(&sha224);
return ret; return ret;
} }
static void __exit sha256_generic_mod_fini(void) static void __exit sha256_generic_mod_fini(void)
{ {
crypto_unregister_alg(&sha224); crypto_unregister_shash(&sha224);
crypto_unregister_alg(&sha256); crypto_unregister_shash(&sha256);
} }
module_init(sha256_generic_mod_init); module_init(sha256_generic_mod_init);
......
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
* later version. * later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/kernel.h> #include <linux/kernel.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
...@@ -18,16 +18,17 @@ ...@@ -18,16 +18,17 @@
#include <linux/crypto.h> #include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
#include <crypto/sha.h> #include <crypto/sha.h>
#include <linux/percpu.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
struct sha512_ctx { struct sha512_ctx {
u64 state[8]; u64 state[8];
u32 count[4]; u32 count[4];
u8 buf[128]; u8 buf[128];
u64 W[80];
}; };
static DEFINE_PER_CPU(u64[80], msg_schedule);
static inline u64 Ch(u64 x, u64 y, u64 z) static inline u64 Ch(u64 x, u64 y, u64 z)
{ {
return z ^ (x & (y ^ z)); return z ^ (x & (y ^ z));
...@@ -89,11 +90,12 @@ static inline void BLEND_OP(int I, u64 *W) ...@@ -89,11 +90,12 @@ static inline void BLEND_OP(int I, u64 *W)
} }
static void static void
sha512_transform(u64 *state, u64 *W, const u8 *input) sha512_transform(u64 *state, const u8 *input)
{ {
u64 a, b, c, d, e, f, g, h, t1, t2; u64 a, b, c, d, e, f, g, h, t1, t2;
int i; int i;
u64 *W = get_cpu_var(msg_schedule);
/* load the input */ /* load the input */
for (i = 0; i < 16; i++) for (i = 0; i < 16; i++)
...@@ -132,12 +134,14 @@ sha512_transform(u64 *state, u64 *W, const u8 *input) ...@@ -132,12 +134,14 @@ sha512_transform(u64 *state, u64 *W, const u8 *input)
/* erase our data */ /* erase our data */
a = b = c = d = e = f = g = h = t1 = t2 = 0; a = b = c = d = e = f = g = h = t1 = t2 = 0;
memset(W, 0, sizeof(__get_cpu_var(msg_schedule)));
put_cpu_var(msg_schedule);
} }
static void static int
sha512_init(struct crypto_tfm *tfm) sha512_init(struct shash_desc *desc)
{ {
struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); struct sha512_ctx *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA512_H0; sctx->state[0] = SHA512_H0;
sctx->state[1] = SHA512_H1; sctx->state[1] = SHA512_H1;
sctx->state[2] = SHA512_H2; sctx->state[2] = SHA512_H2;
...@@ -147,12 +151,14 @@ sha512_init(struct crypto_tfm *tfm) ...@@ -147,12 +151,14 @@ sha512_init(struct crypto_tfm *tfm)
sctx->state[6] = SHA512_H6; sctx->state[6] = SHA512_H6;
sctx->state[7] = SHA512_H7; sctx->state[7] = SHA512_H7;
sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
return 0;
} }
static void static int
sha384_init(struct crypto_tfm *tfm) sha384_init(struct shash_desc *desc)
{ {
struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); struct sha512_ctx *sctx = shash_desc_ctx(desc);
sctx->state[0] = SHA384_H0; sctx->state[0] = SHA384_H0;
sctx->state[1] = SHA384_H1; sctx->state[1] = SHA384_H1;
sctx->state[2] = SHA384_H2; sctx->state[2] = SHA384_H2;
...@@ -162,12 +168,14 @@ sha384_init(struct crypto_tfm *tfm) ...@@ -162,12 +168,14 @@ sha384_init(struct crypto_tfm *tfm)
sctx->state[6] = SHA384_H6; sctx->state[6] = SHA384_H6;
sctx->state[7] = SHA384_H7; sctx->state[7] = SHA384_H7;
sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
return 0;
} }
static void static int
sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{ {
struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); struct sha512_ctx *sctx = shash_desc_ctx(desc);
unsigned int i, index, part_len; unsigned int i, index, part_len;
...@@ -187,10 +195,10 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -187,10 +195,10 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
/* Transform as many times as possible. */ /* Transform as many times as possible. */
if (len >= part_len) { if (len >= part_len) {
memcpy(&sctx->buf[index], data, part_len); memcpy(&sctx->buf[index], data, part_len);
sha512_transform(sctx->state, sctx->W, sctx->buf); sha512_transform(sctx->state, sctx->buf);
for (i = part_len; i + 127 < len; i+=128) for (i = part_len; i + 127 < len; i+=128)
sha512_transform(sctx->state, sctx->W, &data[i]); sha512_transform(sctx->state, &data[i]);
index = 0; index = 0;
} else { } else {
...@@ -200,14 +208,13 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) ...@@ -200,14 +208,13 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
/* Buffer remaining input */ /* Buffer remaining input */
memcpy(&sctx->buf[index], &data[i], len - i); memcpy(&sctx->buf[index], &data[i], len - i);
/* erase our data */ return 0;
memset(sctx->W, 0, sizeof(sctx->W));
} }
static void static int
sha512_final(struct crypto_tfm *tfm, u8 *hash) sha512_final(struct shash_desc *desc, u8 *hash)
{ {
struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); struct sha512_ctx *sctx = shash_desc_ctx(desc);
static u8 padding[128] = { 0x80, }; static u8 padding[128] = { 0x80, };
__be64 *dst = (__be64 *)hash; __be64 *dst = (__be64 *)hash;
__be32 bits[4]; __be32 bits[4];
...@@ -223,10 +230,10 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash) ...@@ -223,10 +230,10 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
/* Pad out to 112 mod 128. */ /* Pad out to 112 mod 128. */
index = (sctx->count[0] >> 3) & 0x7f; index = (sctx->count[0] >> 3) & 0x7f;
pad_len = (index < 112) ? (112 - index) : ((128+112) - index); pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
sha512_update(tfm, padding, pad_len); sha512_update(desc, padding, pad_len);
/* Append length (before padding) */ /* Append length (before padding) */
sha512_update(tfm, (const u8 *)bits, sizeof(bits)); sha512_update(desc, (const u8 *)bits, sizeof(bits));
/* Store state in digest */ /* Store state in digest */
for (i = 0; i < 8; i++) for (i = 0; i < 8; i++)
...@@ -234,66 +241,66 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash) ...@@ -234,66 +241,66 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
/* Zeroize sensitive information. */ /* Zeroize sensitive information. */
memset(sctx, 0, sizeof(struct sha512_ctx)); memset(sctx, 0, sizeof(struct sha512_ctx));
return 0;
} }
static void sha384_final(struct crypto_tfm *tfm, u8 *hash) static int sha384_final(struct shash_desc *desc, u8 *hash)
{ {
u8 D[64]; u8 D[64];
sha512_final(desc, D);
sha512_final(tfm, D); memcpy(hash, D, 48);
memset(D, 0, 64);
memcpy(hash, D, 48); return 0;
memset(D, 0, 64);
} }
static struct crypto_alg sha512 = { static struct shash_alg sha512 = {
.cra_name = "sha512", .digestsize = SHA512_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = sha512_init,
.cra_blocksize = SHA512_BLOCK_SIZE, .update = sha512_update,
.cra_ctxsize = sizeof(struct sha512_ctx), .final = sha512_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct sha512_ctx),
.cra_alignmask = 3, .base = {
.cra_list = LIST_HEAD_INIT(sha512.cra_list), .cra_name = "sha512",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = SHA512_DIGEST_SIZE, .cra_blocksize = SHA512_BLOCK_SIZE,
.dia_init = sha512_init, .cra_module = THIS_MODULE,
.dia_update = sha512_update, }
.dia_final = sha512_final }
}
}; };
static struct crypto_alg sha384 = { static struct shash_alg sha384 = {
.cra_name = "sha384", .digestsize = SHA384_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = sha384_init,
.cra_blocksize = SHA384_BLOCK_SIZE, .update = sha512_update,
.cra_ctxsize = sizeof(struct sha512_ctx), .final = sha384_final,
.cra_alignmask = 3, .descsize = sizeof(struct sha512_ctx),
.cra_module = THIS_MODULE, .base = {
.cra_list = LIST_HEAD_INIT(sha384.cra_list), .cra_name = "sha384",
.cra_u = { .digest = { .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_digestsize = SHA384_DIGEST_SIZE, .cra_blocksize = SHA384_BLOCK_SIZE,
.dia_init = sha384_init, .cra_module = THIS_MODULE,
.dia_update = sha512_update, }
.dia_final = sha384_final }
}
}; };
static int __init sha512_generic_mod_init(void) static int __init sha512_generic_mod_init(void)
{ {
int ret = 0; int ret = 0;
if ((ret = crypto_register_alg(&sha384)) < 0) if ((ret = crypto_register_shash(&sha384)) < 0)
goto out; goto out;
if ((ret = crypto_register_alg(&sha512)) < 0) if ((ret = crypto_register_shash(&sha512)) < 0)
crypto_unregister_alg(&sha384); crypto_unregister_shash(&sha384);
out: out:
return ret; return ret;
} }
static void __exit sha512_generic_mod_fini(void) static void __exit sha512_generic_mod_fini(void)
{ {
crypto_unregister_alg(&sha384); crypto_unregister_shash(&sha384);
crypto_unregister_alg(&sha512); crypto_unregister_shash(&sha512);
} }
module_init(sha512_generic_mod_init); module_init(sha512_generic_mod_init);
......
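Beyond the shash conversion, the sha512 diff above moves the 640-byte W[80] message schedule out of the per-request context and into a static per-CPU buffer, so the scratch space is shared instead of being carried in every descriptor. A hedged sketch of that per-CPU pattern, with made-up names, follows; get_cpu_var() disables preemption, so the buffer needs no locking as long as it is wiped and released before returning.

#include <linux/percpu.h>
#include <linux/string.h>
#include <linux/types.h>

static DEFINE_PER_CPU(u64[80], example_scratch);

static void example_use_scratch(const u8 *block)
{
        u64 *W = get_cpu_var(example_scratch);   /* preemption now disabled */

        /* ... expand block into W[0..79] and run the compression rounds ... */

        memset(W, 0, sizeof(__get_cpu_var(example_scratch)));  /* wipe state */
        put_cpu_var(example_scratch);            /* preemption re-enabled */
}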

...@@ -843,6 +843,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, ...@@ -843,6 +843,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
goto out; goto out;
} }
if (dlen != ctemplate[i].outlen) {
printk(KERN_ERR "alg: comp: Compression test %d "
"failed for %s: output len = %d\n", i + 1, algo,
dlen);
ret = -EINVAL;
goto out;
}
if (memcmp(result, ctemplate[i].output, dlen)) { if (memcmp(result, ctemplate[i].output, dlen)) {
printk(KERN_ERR "alg: comp: Compression test %d " printk(KERN_ERR "alg: comp: Compression test %d "
"failed for %s\n", i + 1, algo); "failed for %s\n", i + 1, algo);
...@@ -853,7 +861,7 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, ...@@ -853,7 +861,7 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
} }
for (i = 0; i < dtcount; i++) { for (i = 0; i < dtcount; i++) {
int ilen, ret, dlen = COMP_BUF_SIZE; int ilen, dlen = COMP_BUF_SIZE;
memset(result, 0, sizeof (result)); memset(result, 0, sizeof (result));
...@@ -867,6 +875,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, ...@@ -867,6 +875,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
goto out; goto out;
} }
if (dlen != dtemplate[i].outlen) {
printk(KERN_ERR "alg: comp: Decompression test %d "
"failed for %s: output len = %d\n", i + 1, algo,
dlen);
ret = -EINVAL;
goto out;
}
if (memcmp(result, dtemplate[i].output, dlen)) { if (memcmp(result, dtemplate[i].output, dlen)) {
printk(KERN_ERR "alg: comp: Decompression test %d " printk(KERN_ERR "alg: comp: Decompression test %d "
"failed for %s\n", i + 1, algo); "failed for %s\n", i + 1, algo);
...@@ -1010,6 +1026,55 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, ...@@ -1010,6 +1026,55 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
return err; return err;
} }
static int alg_test_crc32c(const struct alg_test_desc *desc,
const char *driver, u32 type, u32 mask)
{
struct crypto_shash *tfm;
u32 val;
int err;
err = alg_test_hash(desc, driver, type, mask);
if (err)
goto out;
tfm = crypto_alloc_shash(driver, type, mask);
if (IS_ERR(tfm)) {
printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
"%ld\n", driver, PTR_ERR(tfm));
err = PTR_ERR(tfm);
goto out;
}
do {
struct {
struct shash_desc shash;
char ctx[crypto_shash_descsize(tfm)];
} sdesc;
sdesc.shash.tfm = tfm;
sdesc.shash.flags = 0;
*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
if (err) {
printk(KERN_ERR "alg: crc32c: Operation failed for "
"%s: %d\n", driver, err);
break;
}
if (val != ~420553207) {
printk(KERN_ERR "alg: crc32c: Test failed for %s: "
"%d\n", driver, val);
err = -EINVAL;
}
} while (0);
crypto_free_shash(tfm);
out:
return err;
}
/* Please keep this list sorted by algorithm name. */ /* Please keep this list sorted by algorithm name. */
static const struct alg_test_desc alg_test_descs[] = { static const struct alg_test_desc alg_test_descs[] = {
{ {
...@@ -1134,7 +1199,7 @@ static const struct alg_test_desc alg_test_descs[] = { ...@@ -1134,7 +1199,7 @@ static const struct alg_test_desc alg_test_descs[] = {
} }
}, { }, {
.alg = "crc32c", .alg = "crc32c",
.test = alg_test_hash, .test = alg_test_crc32c,
.suite = { .suite = {
.hash = { .hash = {
.vecs = crc32c_tv_template, .vecs = crc32c_tv_template,
...@@ -1801,6 +1866,7 @@ static int alg_find_test(const char *alg) ...@@ -1801,6 +1866,7 @@ static int alg_find_test(const char *alg)
int alg_test(const char *driver, const char *alg, u32 type, u32 mask) int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{ {
int i; int i;
int rc;
if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
char nalg[CRYPTO_MAX_ALG_NAME]; char nalg[CRYPTO_MAX_ALG_NAME];
...@@ -1820,8 +1886,12 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask) ...@@ -1820,8 +1886,12 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
if (i < 0) if (i < 0)
goto notest; goto notest;
return alg_test_descs[i].test(alg_test_descs + i, driver, rc = alg_test_descs[i].test(alg_test_descs + i, driver,
type, mask); type, mask);
if (fips_enabled && rc)
panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
return rc;
notest: notest:
printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
......
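The new alg_test_crc32c() above drives the shash interface directly: it allocates a "crc32c" transform, builds a descriptor on the stack sized by crypto_shash_descsize(), seeds the context and checks crypto_shash_final() against a known value. As a hedged illustration of the same pattern from an ordinary in-kernel caller (example_crc32c is a hypothetical helper, not part of this commit), a one-shot digest might look like:

/*
 * Illustrative sketch only: compute a CRC32c over a buffer through the
 * shash interface, using the same on-stack descriptor idiom as
 * alg_test_crc32c() above.
 */
#include <crypto/hash.h>
#include <linux/err.h>

static int example_crc32c(const u8 *data, unsigned int len, u32 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("crc32c", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	do {
		/* Descriptor lives on the stack; ctx size is driver-specific. */
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		/* init/update/final in one call; result is written as a little-endian u32. */
		err = crypto_shash_digest(&sdesc.shash, data, len, (u8 *)out);
	} while (0);

	crypto_free_shash(tfm);
	return err;
}

Because crypto_shash_digest() performs init/update/final internally, no explicit seeding of the descriptor context is needed here; the default seed is supplied by the transform itself.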
...@@ -8349,7 +8349,7 @@ struct comp_testvec { ...@@ -8349,7 +8349,7 @@ struct comp_testvec {
/* /*
* Deflate test vectors (null-terminated strings). * Deflate test vectors (null-terminated strings).
* Params: winbits=11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL. * Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL.
*/ */
#define DEFLATE_COMP_TEST_VECTORS 2 #define DEFLATE_COMP_TEST_VECTORS 2
#define DEFLATE_DECOMP_TEST_VECTORS 2 #define DEFLATE_DECOMP_TEST_VECTORS 2
......
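The corrected comment above records the zlib parameters the deflate test vectors were produced with; a negative windowBits value selects a raw deflate stream with no zlib header or checksum. As a hedged userspace sketch (not part of the kernel sources, placeholder input text), such a vector could be regenerated with ordinary zlib and the same parameters:

/* Illustrative only: emit a raw-deflate byte list for a comp_testvec entry. */
#include <stdio.h>
#include <string.h>
#include <zlib.h>

int main(void)
{
	/* Placeholder input; the real vectors in testmgr.h use their own text. */
	static unsigned char input[] =
		"a sample string for the sketch, a sample string for the sketch";
	unsigned char out[512];
	z_stream strm;
	unsigned long i;

	memset(&strm, 0, sizeof(strm));

	/* winbits = -11: raw deflate, no zlib header/trailer. */
	if (deflateInit2(&strm, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
			 -11, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY) != Z_OK)
		return 1;

	strm.next_in = input;
	strm.avail_in = sizeof(input) - 1;
	strm.next_out = out;
	strm.avail_out = sizeof(out);

	if (deflate(&strm, Z_FINISH) != Z_STREAM_END) {
		deflateEnd(&strm);
		return 1;
	}

	for (i = 0; i < strm.total_out; i++)
		printf("0x%02x, ", out[i]);
	printf("\n");

	return deflateEnd(&strm) == Z_OK ? 0 : 1;
}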
...@@ -19,11 +19,11 @@ ...@@ -19,11 +19,11 @@
* (at your option) any later version. * (at your option) any later version.
* *
*/ */
#include <crypto/internal/hash.h>
#include <linux/init.h> #include <linux/init.h>
#include <linux/module.h> #include <linux/module.h>
#include <linux/mm.h> #include <linux/mm.h>
#include <asm/byteorder.h> #include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h> #include <linux/types.h>
#define WP512_DIGEST_SIZE 64 #define WP512_DIGEST_SIZE 64
...@@ -980,8 +980,8 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) { ...@@ -980,8 +980,8 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) {
} }
static void wp512_init(struct crypto_tfm *tfm) { static int wp512_init(struct shash_desc *desc) {
struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); struct wp512_ctx *wctx = shash_desc_ctx(desc);
int i; int i;
memset(wctx->bitLength, 0, 32); memset(wctx->bitLength, 0, 32);
...@@ -990,12 +990,14 @@ static void wp512_init(struct crypto_tfm *tfm) { ...@@ -990,12 +990,14 @@ static void wp512_init(struct crypto_tfm *tfm) {
for (i = 0; i < 8; i++) { for (i = 0; i < 8; i++) {
wctx->hash[i] = 0L; wctx->hash[i] = 0L;
} }
return 0;
} }
static void wp512_update(struct crypto_tfm *tfm, const u8 *source, static int wp512_update(struct shash_desc *desc, const u8 *source,
unsigned int len) unsigned int len)
{ {
struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); struct wp512_ctx *wctx = shash_desc_ctx(desc);
int sourcePos = 0; int sourcePos = 0;
unsigned int bits_len = len * 8; // convert to number of bits unsigned int bits_len = len * 8; // convert to number of bits
int sourceGap = (8 - ((int)bits_len & 7)) & 7; int sourceGap = (8 - ((int)bits_len & 7)) & 7;
...@@ -1051,11 +1053,12 @@ static void wp512_update(struct crypto_tfm *tfm, const u8 *source, ...@@ -1051,11 +1053,12 @@ static void wp512_update(struct crypto_tfm *tfm, const u8 *source,
wctx->bufferBits = bufferBits; wctx->bufferBits = bufferBits;
wctx->bufferPos = bufferPos; wctx->bufferPos = bufferPos;
return 0;
} }
static void wp512_final(struct crypto_tfm *tfm, u8 *out) static int wp512_final(struct shash_desc *desc, u8 *out)
{ {
struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); struct wp512_ctx *wctx = shash_desc_ctx(desc);
int i; int i;
u8 *buffer = wctx->buffer; u8 *buffer = wctx->buffer;
u8 *bitLength = wctx->bitLength; u8 *bitLength = wctx->bitLength;
...@@ -1084,89 +1087,95 @@ static void wp512_final(struct crypto_tfm *tfm, u8 *out) ...@@ -1084,89 +1087,95 @@ static void wp512_final(struct crypto_tfm *tfm, u8 *out)
digest[i] = cpu_to_be64(wctx->hash[i]); digest[i] = cpu_to_be64(wctx->hash[i]);
wctx->bufferBits = bufferBits; wctx->bufferBits = bufferBits;
wctx->bufferPos = bufferPos; wctx->bufferPos = bufferPos;
return 0;
} }
static void wp384_final(struct crypto_tfm *tfm, u8 *out) static int wp384_final(struct shash_desc *desc, u8 *out)
{ {
u8 D[64]; u8 D[64];
wp512_final(tfm, D); wp512_final(desc, D);
memcpy (out, D, WP384_DIGEST_SIZE); memcpy (out, D, WP384_DIGEST_SIZE);
memset (D, 0, WP512_DIGEST_SIZE); memset (D, 0, WP512_DIGEST_SIZE);
return 0;
} }
static void wp256_final(struct crypto_tfm *tfm, u8 *out) static int wp256_final(struct shash_desc *desc, u8 *out)
{ {
u8 D[64]; u8 D[64];
wp512_final(tfm, D); wp512_final(desc, D);
memcpy (out, D, WP256_DIGEST_SIZE); memcpy (out, D, WP256_DIGEST_SIZE);
memset (D, 0, WP512_DIGEST_SIZE); memset (D, 0, WP512_DIGEST_SIZE);
return 0;
} }
static struct crypto_alg wp512 = { static struct shash_alg wp512 = {
.cra_name = "wp512", .digestsize = WP512_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = wp512_init,
.cra_blocksize = WP512_BLOCK_SIZE, .update = wp512_update,
.cra_ctxsize = sizeof(struct wp512_ctx), .final = wp512_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct wp512_ctx),
.cra_list = LIST_HEAD_INIT(wp512.cra_list), .base = {
.cra_u = { .digest = { .cra_name = "wp512",
.dia_digestsize = WP512_DIGEST_SIZE, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_init = wp512_init, .cra_blocksize = WP512_BLOCK_SIZE,
.dia_update = wp512_update, .cra_module = THIS_MODULE,
.dia_final = wp512_final } } }
}; };
static struct crypto_alg wp384 = { static struct shash_alg wp384 = {
.cra_name = "wp384", .digestsize = WP384_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = wp512_init,
.cra_blocksize = WP512_BLOCK_SIZE, .update = wp512_update,
.cra_ctxsize = sizeof(struct wp512_ctx), .final = wp384_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct wp512_ctx),
.cra_list = LIST_HEAD_INIT(wp384.cra_list), .base = {
.cra_u = { .digest = { .cra_name = "wp384",
.dia_digestsize = WP384_DIGEST_SIZE, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_init = wp512_init, .cra_blocksize = WP512_BLOCK_SIZE,
.dia_update = wp512_update, .cra_module = THIS_MODULE,
.dia_final = wp384_final } } }
}; };
static struct crypto_alg wp256 = { static struct shash_alg wp256 = {
.cra_name = "wp256", .digestsize = WP256_DIGEST_SIZE,
.cra_flags = CRYPTO_ALG_TYPE_DIGEST, .init = wp512_init,
.cra_blocksize = WP512_BLOCK_SIZE, .update = wp512_update,
.cra_ctxsize = sizeof(struct wp512_ctx), .final = wp256_final,
.cra_module = THIS_MODULE, .descsize = sizeof(struct wp512_ctx),
.cra_list = LIST_HEAD_INIT(wp256.cra_list), .base = {
.cra_u = { .digest = { .cra_name = "wp256",
.dia_digestsize = WP256_DIGEST_SIZE, .cra_flags = CRYPTO_ALG_TYPE_SHASH,
.dia_init = wp512_init, .cra_blocksize = WP512_BLOCK_SIZE,
.dia_update = wp512_update, .cra_module = THIS_MODULE,
.dia_final = wp256_final } } }
}; };
static int __init wp512_mod_init(void) static int __init wp512_mod_init(void)
{ {
int ret = 0; int ret = 0;
ret = crypto_register_alg(&wp512); ret = crypto_register_shash(&wp512);
if (ret < 0) if (ret < 0)
goto out; goto out;
ret = crypto_register_alg(&wp384); ret = crypto_register_shash(&wp384);
if (ret < 0) if (ret < 0)
{ {
crypto_unregister_alg(&wp512); crypto_unregister_shash(&wp512);
goto out; goto out;
} }
ret = crypto_register_alg(&wp256); ret = crypto_register_shash(&wp256);
if (ret < 0) if (ret < 0)
{ {
crypto_unregister_alg(&wp512); crypto_unregister_shash(&wp512);
crypto_unregister_alg(&wp384); crypto_unregister_shash(&wp384);
} }
out: out:
return ret; return ret;
...@@ -1174,9 +1183,9 @@ static int __init wp512_mod_init(void) ...@@ -1174,9 +1183,9 @@ static int __init wp512_mod_init(void)
static void __exit wp512_mod_fini(void) static void __exit wp512_mod_fini(void)
{ {
crypto_unregister_alg(&wp512); crypto_unregister_shash(&wp512);
crypto_unregister_alg(&wp384); crypto_unregister_shash(&wp384);
crypto_unregister_alg(&wp256); crypto_unregister_shash(&wp256);
} }
MODULE_ALIAS("wp384"); MODULE_ALIAS("wp384");
......
...@@ -23,10 +23,10 @@ struct crypto_aes_ctx { ...@@ -23,10 +23,10 @@ struct crypto_aes_ctx {
u32 key_dec[AES_MAX_KEYLENGTH_U32]; u32 key_dec[AES_MAX_KEYLENGTH_U32];
}; };
extern u32 crypto_ft_tab[4][256]; extern const u32 crypto_ft_tab[4][256];
extern u32 crypto_fl_tab[4][256]; extern const u32 crypto_fl_tab[4][256];
extern u32 crypto_it_tab[4][256]; extern const u32 crypto_it_tab[4][256];
extern u32 crypto_il_tab[4][256]; extern const u32 crypto_il_tab[4][256];
int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
unsigned int key_len); unsigned int key_len);
......
...@@ -64,6 +64,8 @@ config CRC7 ...@@ -64,6 +64,8 @@ config CRC7
config LIBCRC32C config LIBCRC32C
tristate "CRC32c (Castagnoli, et al) Cyclic Redundancy-Check" tristate "CRC32c (Castagnoli, et al) Cyclic Redundancy-Check"
select CRYPTO
select CRYPTO_CRC32C
help help
This option is provided for the case where no in-kernel-tree This option is provided for the case where no in-kernel-tree
modules require CRC32c functions, but a module built outside the modules require CRC32c functions, but a module built outside the
......