Commit 6e8e72cd authored by Corentin Labbe's avatar Corentin Labbe Committed by Herbert Xu

crypto: user - convert all stats from u32 to u64

All the 32-bit statistics fields need to be 64-bit: on fast hardware,
UINT32_MAX crypto operations can be performed in seconds, overflowing a
32-bit counter.
Reported-by: Eric Biggers <ebiggers@kernel.org>
Signed-off-by: Corentin Labbe <clabbe@baylibre.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent a6a31385
...@@ -259,13 +259,13 @@ static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg) ...@@ -259,13 +259,13 @@ static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
list_add(&larval->alg.cra_list, &crypto_alg_list); list_add(&larval->alg.cra_list, &crypto_alg_list);
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
atomic_set(&alg->encrypt_cnt, 0); atomic64_set(&alg->encrypt_cnt, 0);
atomic_set(&alg->decrypt_cnt, 0); atomic64_set(&alg->decrypt_cnt, 0);
atomic64_set(&alg->encrypt_tlen, 0); atomic64_set(&alg->encrypt_tlen, 0);
atomic64_set(&alg->decrypt_tlen, 0); atomic64_set(&alg->decrypt_tlen, 0);
atomic_set(&alg->verify_cnt, 0); atomic64_set(&alg->verify_cnt, 0);
atomic_set(&alg->cipher_err_cnt, 0); atomic64_set(&alg->cipher_err_cnt, 0);
atomic_set(&alg->sign_cnt, 0); atomic64_set(&alg->sign_cnt, 0);
#endif #endif
out: out:
......
...@@ -35,22 +35,21 @@ static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -35,22 +35,21 @@ static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat raead; struct crypto_stat raead;
u64 v64; u64 v64;
u32 v32;
memset(&raead, 0, sizeof(raead)); memset(&raead, 0, sizeof(raead));
strscpy(raead.type, "aead", sizeof(raead.type)); strscpy(raead.type, "aead", sizeof(raead.type));
v32 = atomic_read(&alg->encrypt_cnt); v64 = atomic64_read(&alg->encrypt_cnt);
raead.stat_encrypt_cnt = v32; raead.stat_encrypt_cnt = v64;
v64 = atomic64_read(&alg->encrypt_tlen); v64 = atomic64_read(&alg->encrypt_tlen);
raead.stat_encrypt_tlen = v64; raead.stat_encrypt_tlen = v64;
v32 = atomic_read(&alg->decrypt_cnt); v64 = atomic64_read(&alg->decrypt_cnt);
raead.stat_decrypt_cnt = v32; raead.stat_decrypt_cnt = v64;
v64 = atomic64_read(&alg->decrypt_tlen); v64 = atomic64_read(&alg->decrypt_tlen);
raead.stat_decrypt_tlen = v64; raead.stat_decrypt_tlen = v64;
v32 = atomic_read(&alg->aead_err_cnt); v64 = atomic64_read(&alg->aead_err_cnt);
raead.stat_aead_err_cnt = v32; raead.stat_aead_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead); return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
} }
...@@ -59,22 +58,21 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -59,22 +58,21 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rcipher; struct crypto_stat rcipher;
u64 v64; u64 v64;
u32 v32;
memset(&rcipher, 0, sizeof(rcipher)); memset(&rcipher, 0, sizeof(rcipher));
strscpy(rcipher.type, "cipher", sizeof(rcipher.type)); strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
v32 = atomic_read(&alg->encrypt_cnt); v64 = atomic64_read(&alg->encrypt_cnt);
rcipher.stat_encrypt_cnt = v32; rcipher.stat_encrypt_cnt = v64;
v64 = atomic64_read(&alg->encrypt_tlen); v64 = atomic64_read(&alg->encrypt_tlen);
rcipher.stat_encrypt_tlen = v64; rcipher.stat_encrypt_tlen = v64;
v32 = atomic_read(&alg->decrypt_cnt); v64 = atomic64_read(&alg->decrypt_cnt);
rcipher.stat_decrypt_cnt = v32; rcipher.stat_decrypt_cnt = v64;
v64 = atomic64_read(&alg->decrypt_tlen); v64 = atomic64_read(&alg->decrypt_tlen);
rcipher.stat_decrypt_tlen = v64; rcipher.stat_decrypt_tlen = v64;
v32 = atomic_read(&alg->cipher_err_cnt); v64 = atomic64_read(&alg->cipher_err_cnt);
rcipher.stat_cipher_err_cnt = v32; rcipher.stat_cipher_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher); return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
} }
...@@ -83,21 +81,20 @@ static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -83,21 +81,20 @@ static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rcomp; struct crypto_stat rcomp;
u64 v64; u64 v64;
u32 v32;
memset(&rcomp, 0, sizeof(rcomp)); memset(&rcomp, 0, sizeof(rcomp));
strscpy(rcomp.type, "compression", sizeof(rcomp.type)); strscpy(rcomp.type, "compression", sizeof(rcomp.type));
v32 = atomic_read(&alg->compress_cnt); v64 = atomic64_read(&alg->compress_cnt);
rcomp.stat_compress_cnt = v32; rcomp.stat_compress_cnt = v64;
v64 = atomic64_read(&alg->compress_tlen); v64 = atomic64_read(&alg->compress_tlen);
rcomp.stat_compress_tlen = v64; rcomp.stat_compress_tlen = v64;
v32 = atomic_read(&alg->decompress_cnt); v64 = atomic64_read(&alg->decompress_cnt);
rcomp.stat_decompress_cnt = v32; rcomp.stat_decompress_cnt = v64;
v64 = atomic64_read(&alg->decompress_tlen); v64 = atomic64_read(&alg->decompress_tlen);
rcomp.stat_decompress_tlen = v64; rcomp.stat_decompress_tlen = v64;
v32 = atomic_read(&alg->cipher_err_cnt); v64 = atomic64_read(&alg->cipher_err_cnt);
rcomp.stat_compress_err_cnt = v32; rcomp.stat_compress_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp); return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
} }
...@@ -106,21 +103,20 @@ static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -106,21 +103,20 @@ static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat racomp; struct crypto_stat racomp;
u64 v64; u64 v64;
u32 v32;
memset(&racomp, 0, sizeof(racomp)); memset(&racomp, 0, sizeof(racomp));
strscpy(racomp.type, "acomp", sizeof(racomp.type)); strscpy(racomp.type, "acomp", sizeof(racomp.type));
v32 = atomic_read(&alg->compress_cnt); v64 = atomic64_read(&alg->compress_cnt);
racomp.stat_compress_cnt = v32; racomp.stat_compress_cnt = v64;
v64 = atomic64_read(&alg->compress_tlen); v64 = atomic64_read(&alg->compress_tlen);
racomp.stat_compress_tlen = v64; racomp.stat_compress_tlen = v64;
v32 = atomic_read(&alg->decompress_cnt); v64 = atomic64_read(&alg->decompress_cnt);
racomp.stat_decompress_cnt = v32; racomp.stat_decompress_cnt = v64;
v64 = atomic64_read(&alg->decompress_tlen); v64 = atomic64_read(&alg->decompress_tlen);
racomp.stat_decompress_tlen = v64; racomp.stat_decompress_tlen = v64;
v32 = atomic_read(&alg->cipher_err_cnt); v64 = atomic64_read(&alg->cipher_err_cnt);
racomp.stat_compress_err_cnt = v32; racomp.stat_compress_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp); return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
} }
...@@ -129,25 +125,24 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -129,25 +125,24 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rakcipher; struct crypto_stat rakcipher;
u64 v64; u64 v64;
u32 v32;
memset(&rakcipher, 0, sizeof(rakcipher)); memset(&rakcipher, 0, sizeof(rakcipher));
strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type)); strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
v32 = atomic_read(&alg->encrypt_cnt); v64 = atomic64_read(&alg->encrypt_cnt);
rakcipher.stat_encrypt_cnt = v32; rakcipher.stat_encrypt_cnt = v64;
v64 = atomic64_read(&alg->encrypt_tlen); v64 = atomic64_read(&alg->encrypt_tlen);
rakcipher.stat_encrypt_tlen = v64; rakcipher.stat_encrypt_tlen = v64;
v32 = atomic_read(&alg->decrypt_cnt); v64 = atomic64_read(&alg->decrypt_cnt);
rakcipher.stat_decrypt_cnt = v32; rakcipher.stat_decrypt_cnt = v64;
v64 = atomic64_read(&alg->decrypt_tlen); v64 = atomic64_read(&alg->decrypt_tlen);
rakcipher.stat_decrypt_tlen = v64; rakcipher.stat_decrypt_tlen = v64;
v32 = atomic_read(&alg->sign_cnt); v64 = atomic64_read(&alg->sign_cnt);
rakcipher.stat_sign_cnt = v32; rakcipher.stat_sign_cnt = v64;
v32 = atomic_read(&alg->verify_cnt); v64 = atomic64_read(&alg->verify_cnt);
rakcipher.stat_verify_cnt = v32; rakcipher.stat_verify_cnt = v64;
v32 = atomic_read(&alg->akcipher_err_cnt); v64 = atomic64_read(&alg->akcipher_err_cnt);
rakcipher.stat_akcipher_err_cnt = v32; rakcipher.stat_akcipher_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER, return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
sizeof(rakcipher), &rakcipher); sizeof(rakcipher), &rakcipher);
...@@ -156,19 +151,19 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -156,19 +151,19 @@ static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg) static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rkpp; struct crypto_stat rkpp;
u32 v; u64 v;
memset(&rkpp, 0, sizeof(rkpp)); memset(&rkpp, 0, sizeof(rkpp));
strscpy(rkpp.type, "kpp", sizeof(rkpp.type)); strscpy(rkpp.type, "kpp", sizeof(rkpp.type));
v = atomic_read(&alg->setsecret_cnt); v = atomic64_read(&alg->setsecret_cnt);
rkpp.stat_setsecret_cnt = v; rkpp.stat_setsecret_cnt = v;
v = atomic_read(&alg->generate_public_key_cnt); v = atomic64_read(&alg->generate_public_key_cnt);
rkpp.stat_generate_public_key_cnt = v; rkpp.stat_generate_public_key_cnt = v;
v = atomic_read(&alg->compute_shared_secret_cnt); v = atomic64_read(&alg->compute_shared_secret_cnt);
rkpp.stat_compute_shared_secret_cnt = v; rkpp.stat_compute_shared_secret_cnt = v;
v = atomic_read(&alg->kpp_err_cnt); v = atomic64_read(&alg->kpp_err_cnt);
rkpp.stat_kpp_err_cnt = v; rkpp.stat_kpp_err_cnt = v;
return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp); return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
...@@ -178,18 +173,17 @@ static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -178,18 +173,17 @@ static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rhash; struct crypto_stat rhash;
u64 v64; u64 v64;
u32 v32;
memset(&rhash, 0, sizeof(rhash)); memset(&rhash, 0, sizeof(rhash));
strscpy(rhash.type, "ahash", sizeof(rhash.type)); strscpy(rhash.type, "ahash", sizeof(rhash.type));
v32 = atomic_read(&alg->hash_cnt); v64 = atomic64_read(&alg->hash_cnt);
rhash.stat_hash_cnt = v32; rhash.stat_hash_cnt = v64;
v64 = atomic64_read(&alg->hash_tlen); v64 = atomic64_read(&alg->hash_tlen);
rhash.stat_hash_tlen = v64; rhash.stat_hash_tlen = v64;
v32 = atomic_read(&alg->hash_err_cnt); v64 = atomic64_read(&alg->hash_err_cnt);
rhash.stat_hash_err_cnt = v32; rhash.stat_hash_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash); return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
} }
...@@ -198,18 +192,17 @@ static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -198,18 +192,17 @@ static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rhash; struct crypto_stat rhash;
u64 v64; u64 v64;
u32 v32;
memset(&rhash, 0, sizeof(rhash)); memset(&rhash, 0, sizeof(rhash));
strscpy(rhash.type, "shash", sizeof(rhash.type)); strscpy(rhash.type, "shash", sizeof(rhash.type));
v32 = atomic_read(&alg->hash_cnt); v64 = atomic64_read(&alg->hash_cnt);
rhash.stat_hash_cnt = v32; rhash.stat_hash_cnt = v64;
v64 = atomic64_read(&alg->hash_tlen); v64 = atomic64_read(&alg->hash_tlen);
rhash.stat_hash_tlen = v64; rhash.stat_hash_tlen = v64;
v32 = atomic_read(&alg->hash_err_cnt); v64 = atomic64_read(&alg->hash_err_cnt);
rhash.stat_hash_err_cnt = v32; rhash.stat_hash_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash); return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
} }
...@@ -218,20 +211,19 @@ static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg) ...@@ -218,20 +211,19 @@ static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
{ {
struct crypto_stat rrng; struct crypto_stat rrng;
u64 v64; u64 v64;
u32 v32;
memset(&rrng, 0, sizeof(rrng)); memset(&rrng, 0, sizeof(rrng));
strscpy(rrng.type, "rng", sizeof(rrng.type)); strscpy(rrng.type, "rng", sizeof(rrng.type));
v32 = atomic_read(&alg->generate_cnt); v64 = atomic64_read(&alg->generate_cnt);
rrng.stat_generate_cnt = v32; rrng.stat_generate_cnt = v64;
v64 = atomic64_read(&alg->generate_tlen); v64 = atomic64_read(&alg->generate_tlen);
rrng.stat_generate_tlen = v64; rrng.stat_generate_tlen = v64;
v32 = atomic_read(&alg->seed_cnt); v64 = atomic64_read(&alg->seed_cnt);
rrng.stat_seed_cnt = v32; rrng.stat_seed_cnt = v64;
v32 = atomic_read(&alg->hash_err_cnt); v64 = atomic64_read(&alg->hash_err_cnt);
rrng.stat_rng_err_cnt = v32; rrng.stat_rng_err_cnt = v64;
return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng); return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
} }
......
...@@ -240,9 +240,9 @@ static inline void crypto_stat_compress(struct acomp_req *req, int ret) ...@@ -240,9 +240,9 @@ static inline void crypto_stat_compress(struct acomp_req *req, int ret)
struct crypto_acomp *tfm = crypto_acomp_reqtfm(req); struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->compress_err_cnt); atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->compress_cnt); atomic64_inc(&tfm->base.__crt_alg->compress_cnt);
atomic64_add(req->slen, &tfm->base.__crt_alg->compress_tlen); atomic64_add(req->slen, &tfm->base.__crt_alg->compress_tlen);
} }
#endif #endif
...@@ -254,9 +254,9 @@ static inline void crypto_stat_decompress(struct acomp_req *req, int ret) ...@@ -254,9 +254,9 @@ static inline void crypto_stat_decompress(struct acomp_req *req, int ret)
struct crypto_acomp *tfm = crypto_acomp_reqtfm(req); struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->compress_err_cnt); atomic64_inc(&tfm->base.__crt_alg->compress_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->decompress_cnt); atomic64_inc(&tfm->base.__crt_alg->decompress_cnt);
atomic64_add(req->slen, &tfm->base.__crt_alg->decompress_tlen); atomic64_add(req->slen, &tfm->base.__crt_alg->decompress_tlen);
} }
#endif #endif
......
...@@ -312,9 +312,9 @@ static inline void crypto_stat_aead_encrypt(struct aead_request *req, int ret) ...@@ -312,9 +312,9 @@ static inline void crypto_stat_aead_encrypt(struct aead_request *req, int ret)
struct crypto_aead *tfm = crypto_aead_reqtfm(req); struct crypto_aead *tfm = crypto_aead_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->aead_err_cnt); atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->encrypt_cnt); atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
atomic64_add(req->cryptlen, &tfm->base.__crt_alg->encrypt_tlen); atomic64_add(req->cryptlen, &tfm->base.__crt_alg->encrypt_tlen);
} }
#endif #endif
...@@ -326,9 +326,9 @@ static inline void crypto_stat_aead_decrypt(struct aead_request *req, int ret) ...@@ -326,9 +326,9 @@ static inline void crypto_stat_aead_decrypt(struct aead_request *req, int ret)
struct crypto_aead *tfm = crypto_aead_reqtfm(req); struct crypto_aead *tfm = crypto_aead_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->aead_err_cnt); atomic64_inc(&tfm->base.__crt_alg->aead_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->decrypt_cnt); atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
atomic64_add(req->cryptlen, &tfm->base.__crt_alg->decrypt_tlen); atomic64_add(req->cryptlen, &tfm->base.__crt_alg->decrypt_tlen);
} }
#endif #endif
......
...@@ -278,9 +278,9 @@ static inline void crypto_stat_akcipher_encrypt(struct akcipher_request *req, ...@@ -278,9 +278,9 @@ static inline void crypto_stat_akcipher_encrypt(struct akcipher_request *req,
struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt); atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->encrypt_cnt); atomic64_inc(&tfm->base.__crt_alg->encrypt_cnt);
atomic64_add(req->src_len, &tfm->base.__crt_alg->encrypt_tlen); atomic64_add(req->src_len, &tfm->base.__crt_alg->encrypt_tlen);
} }
#endif #endif
...@@ -293,9 +293,9 @@ static inline void crypto_stat_akcipher_decrypt(struct akcipher_request *req, ...@@ -293,9 +293,9 @@ static inline void crypto_stat_akcipher_decrypt(struct akcipher_request *req,
struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt); atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->decrypt_cnt); atomic64_inc(&tfm->base.__crt_alg->decrypt_cnt);
atomic64_add(req->src_len, &tfm->base.__crt_alg->decrypt_tlen); atomic64_add(req->src_len, &tfm->base.__crt_alg->decrypt_tlen);
} }
#endif #endif
...@@ -308,9 +308,9 @@ static inline void crypto_stat_akcipher_sign(struct akcipher_request *req, ...@@ -308,9 +308,9 @@ static inline void crypto_stat_akcipher_sign(struct akcipher_request *req,
struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) if (ret && ret != -EINPROGRESS && ret != -EBUSY)
atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt); atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->sign_cnt); atomic64_inc(&tfm->base.__crt_alg->sign_cnt);
#endif #endif
} }
...@@ -321,9 +321,9 @@ static inline void crypto_stat_akcipher_verify(struct akcipher_request *req, ...@@ -321,9 +321,9 @@ static inline void crypto_stat_akcipher_verify(struct akcipher_request *req,
struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req); struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) if (ret && ret != -EINPROGRESS && ret != -EBUSY)
atomic_inc(&tfm->base.__crt_alg->akcipher_err_cnt); atomic64_inc(&tfm->base.__crt_alg->akcipher_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->verify_cnt); atomic64_inc(&tfm->base.__crt_alg->verify_cnt);
#endif #endif
} }
......
...@@ -418,7 +418,7 @@ static inline void crypto_stat_ahash_update(struct ahash_request *req, int ret) ...@@ -418,7 +418,7 @@ static inline void crypto_stat_ahash_update(struct ahash_request *req, int ret)
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) if (ret && ret != -EINPROGRESS && ret != -EBUSY)
atomic_inc(&tfm->base.__crt_alg->hash_err_cnt); atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
else else
atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen); atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
#endif #endif
...@@ -430,9 +430,9 @@ static inline void crypto_stat_ahash_final(struct ahash_request *req, int ret) ...@@ -430,9 +430,9 @@ static inline void crypto_stat_ahash_final(struct ahash_request *req, int ret)
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->hash_err_cnt); atomic64_inc(&tfm->base.__crt_alg->hash_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->hash_cnt); atomic64_inc(&tfm->base.__crt_alg->hash_cnt);
atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen); atomic64_add(req->nbytes, &tfm->base.__crt_alg->hash_tlen);
} }
#endif #endif
......
...@@ -272,9 +272,9 @@ static inline void crypto_stat_kpp_set_secret(struct crypto_kpp *tfm, int ret) ...@@ -272,9 +272,9 @@ static inline void crypto_stat_kpp_set_secret(struct crypto_kpp *tfm, int ret)
{ {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
if (ret) if (ret)
atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt); atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->setsecret_cnt); atomic64_inc(&tfm->base.__crt_alg->setsecret_cnt);
#endif #endif
} }
...@@ -285,9 +285,9 @@ static inline void crypto_stat_kpp_generate_public_key(struct kpp_request *req, ...@@ -285,9 +285,9 @@ static inline void crypto_stat_kpp_generate_public_key(struct kpp_request *req,
struct crypto_kpp *tfm = crypto_kpp_reqtfm(req); struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
if (ret) if (ret)
atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt); atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->generate_public_key_cnt); atomic64_inc(&tfm->base.__crt_alg->generate_public_key_cnt);
#endif #endif
} }
...@@ -298,9 +298,9 @@ static inline void crypto_stat_kpp_compute_shared_secret(struct kpp_request *req ...@@ -298,9 +298,9 @@ static inline void crypto_stat_kpp_compute_shared_secret(struct kpp_request *req
struct crypto_kpp *tfm = crypto_kpp_reqtfm(req); struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
if (ret) if (ret)
atomic_inc(&tfm->base.__crt_alg->kpp_err_cnt); atomic64_inc(&tfm->base.__crt_alg->kpp_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt); atomic64_inc(&tfm->base.__crt_alg->compute_shared_secret_cnt);
#endif #endif
} }
......
...@@ -126,9 +126,9 @@ static inline void crypto_stat_rng_seed(struct crypto_rng *tfm, int ret) ...@@ -126,9 +126,9 @@ static inline void crypto_stat_rng_seed(struct crypto_rng *tfm, int ret)
{ {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
if (ret && ret != -EINPROGRESS && ret != -EBUSY) if (ret && ret != -EINPROGRESS && ret != -EBUSY)
atomic_inc(&tfm->base.__crt_alg->rng_err_cnt); atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
else else
atomic_inc(&tfm->base.__crt_alg->seed_cnt); atomic64_inc(&tfm->base.__crt_alg->seed_cnt);
#endif #endif
} }
...@@ -137,9 +137,9 @@ static inline void crypto_stat_rng_generate(struct crypto_rng *tfm, ...@@ -137,9 +137,9 @@ static inline void crypto_stat_rng_generate(struct crypto_rng *tfm,
{ {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&tfm->base.__crt_alg->rng_err_cnt); atomic64_inc(&tfm->base.__crt_alg->rng_err_cnt);
} else { } else {
atomic_inc(&tfm->base.__crt_alg->generate_cnt); atomic64_inc(&tfm->base.__crt_alg->generate_cnt);
atomic64_add(dlen, &tfm->base.__crt_alg->generate_tlen); atomic64_add(dlen, &tfm->base.__crt_alg->generate_tlen);
} }
#endif #endif
......
...@@ -491,9 +491,9 @@ static inline void crypto_stat_skcipher_encrypt(struct skcipher_request *req, ...@@ -491,9 +491,9 @@ static inline void crypto_stat_skcipher_encrypt(struct skcipher_request *req,
{ {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&alg->cipher_err_cnt); atomic64_inc(&alg->cipher_err_cnt);
} else { } else {
atomic_inc(&alg->encrypt_cnt); atomic64_inc(&alg->encrypt_cnt);
atomic64_add(req->cryptlen, &alg->encrypt_tlen); atomic64_add(req->cryptlen, &alg->encrypt_tlen);
} }
#endif #endif
...@@ -504,9 +504,9 @@ static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req, ...@@ -504,9 +504,9 @@ static inline void crypto_stat_skcipher_decrypt(struct skcipher_request *req,
{ {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&alg->cipher_err_cnt); atomic64_inc(&alg->cipher_err_cnt);
} else { } else {
atomic_inc(&alg->decrypt_cnt); atomic64_inc(&alg->decrypt_cnt);
atomic64_add(req->cryptlen, &alg->decrypt_tlen); atomic64_add(req->cryptlen, &alg->decrypt_tlen);
} }
#endif #endif
......
...@@ -517,11 +517,11 @@ struct crypto_alg { ...@@ -517,11 +517,11 @@ struct crypto_alg {
#ifdef CONFIG_CRYPTO_STATS #ifdef CONFIG_CRYPTO_STATS
union { union {
atomic_t encrypt_cnt; atomic64_t encrypt_cnt;
atomic_t compress_cnt; atomic64_t compress_cnt;
atomic_t generate_cnt; atomic64_t generate_cnt;
atomic_t hash_cnt; atomic64_t hash_cnt;
atomic_t setsecret_cnt; atomic64_t setsecret_cnt;
}; };
union { union {
atomic64_t encrypt_tlen; atomic64_t encrypt_tlen;
...@@ -530,29 +530,29 @@ struct crypto_alg { ...@@ -530,29 +530,29 @@ struct crypto_alg {
atomic64_t hash_tlen; atomic64_t hash_tlen;
}; };
union { union {
atomic_t akcipher_err_cnt; atomic64_t akcipher_err_cnt;
atomic_t cipher_err_cnt; atomic64_t cipher_err_cnt;
atomic_t compress_err_cnt; atomic64_t compress_err_cnt;
atomic_t aead_err_cnt; atomic64_t aead_err_cnt;
atomic_t hash_err_cnt; atomic64_t hash_err_cnt;
atomic_t rng_err_cnt; atomic64_t rng_err_cnt;
atomic_t kpp_err_cnt; atomic64_t kpp_err_cnt;
}; };
union { union {
atomic_t decrypt_cnt; atomic64_t decrypt_cnt;
atomic_t decompress_cnt; atomic64_t decompress_cnt;
atomic_t seed_cnt; atomic64_t seed_cnt;
atomic_t generate_public_key_cnt; atomic64_t generate_public_key_cnt;
}; };
union { union {
atomic64_t decrypt_tlen; atomic64_t decrypt_tlen;
atomic64_t decompress_tlen; atomic64_t decompress_tlen;
}; };
union { union {
atomic_t verify_cnt; atomic64_t verify_cnt;
atomic_t compute_shared_secret_cnt; atomic64_t compute_shared_secret_cnt;
}; };
atomic_t sign_cnt; atomic64_t sign_cnt;
#endif /* CONFIG_CRYPTO_STATS */ #endif /* CONFIG_CRYPTO_STATS */
} CRYPTO_MINALIGN_ATTR; } CRYPTO_MINALIGN_ATTR;
...@@ -983,9 +983,9 @@ static inline void crypto_stat_ablkcipher_encrypt(struct ablkcipher_request *req ...@@ -983,9 +983,9 @@ static inline void crypto_stat_ablkcipher_encrypt(struct ablkcipher_request *req
crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req)); crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt); atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
} else { } else {
atomic_inc(&crt->base->base.__crt_alg->encrypt_cnt); atomic64_inc(&crt->base->base.__crt_alg->encrypt_cnt);
atomic64_add(req->nbytes, &crt->base->base.__crt_alg->encrypt_tlen); atomic64_add(req->nbytes, &crt->base->base.__crt_alg->encrypt_tlen);
} }
#endif #endif
...@@ -999,9 +999,9 @@ static inline void crypto_stat_ablkcipher_decrypt(struct ablkcipher_request *req ...@@ -999,9 +999,9 @@ static inline void crypto_stat_ablkcipher_decrypt(struct ablkcipher_request *req
crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req)); crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
if (ret && ret != -EINPROGRESS && ret != -EBUSY) { if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
atomic_inc(&crt->base->base.__crt_alg->cipher_err_cnt); atomic64_inc(&crt->base->base.__crt_alg->cipher_err_cnt);
} else { } else {
atomic_inc(&crt->base->base.__crt_alg->decrypt_cnt); atomic64_inc(&crt->base->base.__crt_alg->decrypt_cnt);
atomic64_add(req->nbytes, &crt->base->base.__crt_alg->decrypt_tlen); atomic64_add(req->nbytes, &crt->base->base.__crt_alg->decrypt_tlen);
} }
#endif #endif
......
...@@ -79,11 +79,11 @@ struct crypto_user_alg { ...@@ -79,11 +79,11 @@ struct crypto_user_alg {
struct crypto_stat { struct crypto_stat {
char type[CRYPTO_MAX_NAME]; char type[CRYPTO_MAX_NAME];
union { union {
__u32 stat_encrypt_cnt; __u64 stat_encrypt_cnt;
__u32 stat_compress_cnt; __u64 stat_compress_cnt;
__u32 stat_generate_cnt; __u64 stat_generate_cnt;
__u32 stat_hash_cnt; __u64 stat_hash_cnt;
__u32 stat_setsecret_cnt; __u64 stat_setsecret_cnt;
}; };
union { union {
__u64 stat_encrypt_tlen; __u64 stat_encrypt_tlen;
...@@ -92,29 +92,29 @@ struct crypto_stat { ...@@ -92,29 +92,29 @@ struct crypto_stat {
__u64 stat_hash_tlen; __u64 stat_hash_tlen;
}; };
union { union {
__u32 stat_akcipher_err_cnt; __u64 stat_akcipher_err_cnt;
__u32 stat_cipher_err_cnt; __u64 stat_cipher_err_cnt;
__u32 stat_compress_err_cnt; __u64 stat_compress_err_cnt;
__u32 stat_aead_err_cnt; __u64 stat_aead_err_cnt;
__u32 stat_hash_err_cnt; __u64 stat_hash_err_cnt;
__u32 stat_rng_err_cnt; __u64 stat_rng_err_cnt;
__u32 stat_kpp_err_cnt; __u64 stat_kpp_err_cnt;
}; };
union { union {
__u32 stat_decrypt_cnt; __u64 stat_decrypt_cnt;
__u32 stat_decompress_cnt; __u64 stat_decompress_cnt;
__u32 stat_seed_cnt; __u64 stat_seed_cnt;
__u32 stat_generate_public_key_cnt; __u64 stat_generate_public_key_cnt;
}; };
union { union {
__u64 stat_decrypt_tlen; __u64 stat_decrypt_tlen;
__u64 stat_decompress_tlen; __u64 stat_decompress_tlen;
}; };
union { union {
__u32 stat_verify_cnt; __u64 stat_verify_cnt;
__u32 stat_compute_shared_secret_cnt; __u64 stat_compute_shared_secret_cnt;
}; };
__u32 stat_sign_cnt; __u64 stat_sign_cnt;
}; };
struct crypto_report_larval { struct crypto_report_larval {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment