mirror of https://mirrors.bfsu.edu.cn/git/linux.git (synced 2024-11-14 07:44:21 +08:00)
commit 2f1f34c1bf
The "ahash" API provides access to both CPU-based and hardware offload- based implementations of hash algorithms. Typically the former are implemented as "shash" algorithms under the hood, while the latter are implemented as "ahash" algorithms. The "ahash" API provides access to both. Various kernel subsystems use the ahash API because they want to support hashing hardware offload without using a separate API for it. Yet, the common case is that a crypto accelerator is not actually being used, and ahash is just wrapping a CPU-based shash algorithm. This patch optimizes the ahash API for that common case by eliminating the extra indirect call for each ahash operation on top of shash. It also fixes the double-counting of crypto stats in this scenario (though CONFIG_CRYPTO_STATS should *not* be enabled by anyone interested in performance anyway...), and it eliminates redundant checking of CRYPTO_TFM_NEED_KEY. As a bonus, it also shrinks struct crypto_ahash. Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
49 lines · 1.2 KiB · C
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API.
 *
 * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _LOCAL_CRYPTO_HASH_H
#define _LOCAL_CRYPTO_HASH_H

#include <crypto/internal/hash.h>
#include <linux/cryptouser.h>

#include "internal.h"

static inline struct crypto_istat_hash *hash_get_stat(
        struct hash_alg_common *alg)
{
#ifdef CONFIG_CRYPTO_STATS
        return &alg->stat;
#else
        return NULL;
#endif
}

static inline int crypto_hash_report_stat(struct sk_buff *skb,
                                          struct crypto_alg *alg,
                                          const char *type)
{
        struct hash_alg_common *halg = __crypto_hash_alg_common(alg);
        struct crypto_istat_hash *istat = hash_get_stat(halg);
        struct crypto_stat_hash rhash;

        memset(&rhash, 0, sizeof(rhash));

        strscpy(rhash.type, type, sizeof(rhash.type));

        rhash.stat_hash_cnt = atomic64_read(&istat->hash_cnt);
        rhash.stat_hash_tlen = atomic64_read(&istat->hash_tlen);
        rhash.stat_err_cnt = atomic64_read(&istat->err_cnt);

        return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
}

extern const struct crypto_type crypto_shash_type;

int hash_prepare_alg(struct hash_alg_common *alg);

#endif /* _LOCAL_CRYPTO_HASH_H */
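To tie this header back to the commit message, here is a toy, self-contained C sketch of what "eliminating the extra indirect call" means when an ahash handle merely wraps a CPU-based shash. It is not the kernel's actual code; the names toy_ahash, toy_shash, toy_ahash_digest and fake_cpu_digest are invented for illustration. The point is only the shape of the dispatch: record once that the handle is shash-backed, then call the shash directly instead of bouncing every request through a forwarding function pointer.

#include <stdio.h>
#include <string.h>

/* Toy stand-in for a synchronous, CPU-only hash implementation. */
struct toy_shash {
        void (*digest)(const char *msg, char *out);
};

/* Toy stand-in for an ahash handle that may or may not wrap a shash. */
struct toy_ahash {
        int wraps_shash;                            /* decided once, at allocation time */
        const struct toy_shash *shash;              /* valid when wraps_shash is set */
        void (*digest)(const char *msg, char *out); /* genuine offload path */
};

/* Fake "hash": just reports the message length, enough to trace the call path. */
static void fake_cpu_digest(const char *msg, char *out)
{
        snprintf(out, 32, "len=%zu", strlen(msg));
}

static const struct toy_shash cpu_impl = { .digest = fake_cpu_digest };

/* Old pattern: every request pays an extra forwarding call to reach the shash. */
static void forwarding_digest(const char *msg, char *out)
{
        cpu_impl.digest(msg, out);
}

/* New pattern: one flag test per call, then straight to the shash. */
static void toy_ahash_digest(const struct toy_ahash *tfm, const char *msg, char *out)
{
        if (tfm->wraps_shash)
                tfm->shash->digest(msg, out);   /* common case: no wrapper hop */
        else
                tfm->digest(msg, out);          /* real hardware offload */
}

int main(void)
{
        struct toy_ahash old_style = { .wraps_shash = 0, .digest = forwarding_digest };
        struct toy_ahash new_style = { .wraps_shash = 1, .shash = &cpu_impl };
        char out[32];

        toy_ahash_digest(&old_style, "hello", out);
        printf("old path: %s\n", out);
        toy_ahash_digest(&new_style, "hello", out);
        printf("new path: %s\n", out);
        return 0;
}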