crypto: s390/aes - Use skcipher for fallback

This patch replaces use of the obsolete blkcipher with skcipher.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author: Herbert Xu <herbert@gondor.apana.org.au>
Date:   2016-06-29 18:04:07 +08:00
Parent: 678adecd11
Commit: 64e26807bb
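
For orientation, below is a minimal sketch of the skcipher fallback pattern this commit
switches to. It is an illustration only, not the driver's code: my_fallback_ctx and the
my_fallback_* helpers are invented names, while the real driver keeps the fallback tfm in
s390_aes_ctx / s390_xts_ctx and drives it from its blkcipher entry points, as the hunks
below show.

/*
 * Hedged sketch: allocate a synchronous skcipher fallback, run one request
 * through it with an on-stack request, and free it again.  Names prefixed
 * with my_ are illustrative only.
 */
#include <crypto/skcipher.h>
#include <crypto/internal/skcipher.h>  /* SKCIPHER_REQUEST_ON_STACK, skcipher_request_zero */
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct my_fallback_ctx {
        struct crypto_skcipher *fallback;
};

static int my_fallback_init(struct my_fallback_ctx *ctx, const char *name)
{
        /*
         * The mask requires a synchronous implementation of "name" that does
         * not itself need a fallback.
         */
        ctx->fallback = crypto_alloc_skcipher(name, 0,
                                              CRYPTO_ALG_ASYNC |
                                              CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->fallback)) {
                pr_err("Allocating fallback algorithm %s failed\n", name);
                return PTR_ERR(ctx->fallback);
        }
        return 0;
}

static int my_fallback_encrypt(struct my_fallback_ctx *ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, void *iv, u32 flags)
{
        /* A request on the stack is sufficient for a synchronous fallback. */
        SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
        int ret;

        skcipher_request_set_tfm(req, ctx->fallback);
        skcipher_request_set_callback(req, flags, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, iv);

        ret = crypto_skcipher_encrypt(req);

        /* Wipe the on-stack request before it goes out of scope. */
        skcipher_request_zero(req);
        return ret;
}

static void my_fallback_exit(struct my_fallback_ctx *ctx)
{
        crypto_free_skcipher(ctx->fallback);
}

The setkey paths of the patch additionally propagate the caller's CRYPTO_TFM_REQ_* flags
to the fallback before crypto_skcipher_setkey() and copy the fallback's CRYPTO_TFM_RES_*
flags back to the parent tfm; see the two setkey hunks and the second sketch after the
end of the diff.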

arch/s390/crypto/aes_s390.c

@@ -22,6 +22,7 @@
 
 #include <crypto/aes.h>
 #include <crypto/algapi.h>
+#include <crypto/internal/skcipher.h>
 #include <linux/err.h>
 #include <linux/module.h>
 #include <linux/cpufeature.h>
@@ -44,7 +45,7 @@ struct s390_aes_ctx {
 	long dec;
 	int key_len;
 	union {
-		struct crypto_blkcipher *blk;
+		struct crypto_skcipher *blk;
 		struct crypto_cipher *cip;
 	} fallback;
 };
@@ -63,7 +64,7 @@ struct s390_xts_ctx {
 	long enc;
 	long dec;
 	int key_len;
-	struct crypto_blkcipher *fallback;
+	struct crypto_skcipher *fallback;
 };
 
 /*
@@ -237,16 +238,16 @@ static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 	unsigned int ret;
 
-	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
-	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
-			CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
+				  CRYPTO_TFM_REQ_MASK);
 
-	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
-	if (ret) {
-		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
-				CRYPTO_TFM_RES_MASK);
-	}
+	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);
+
+	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
+			  CRYPTO_TFM_RES_MASK;
+
 	return ret;
 }
 
@@ -255,15 +256,17 @@ static int fallback_blk_dec(struct blkcipher_desc *desc,
 		unsigned int nbytes)
 {
 	unsigned int ret;
-	struct crypto_blkcipher *tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm = desc->tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
+	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
 
-	tfm = desc->tfm;
-	desc->tfm = sctx->fallback.blk;
+	skcipher_request_set_tfm(req, sctx->fallback.blk);
+	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
-	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+	ret = crypto_skcipher_decrypt(req);
 
-	desc->tfm = tfm;
+	skcipher_request_zero(req);
 	return ret;
 }
 
@@ -272,15 +275,15 @@ static int fallback_blk_enc(struct blkcipher_desc *desc,
 		unsigned int nbytes)
 {
 	unsigned int ret;
-	struct crypto_blkcipher *tfm;
-	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct crypto_blkcipher *tfm = desc->tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
+	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
 
-	tfm = desc->tfm;
-	desc->tfm = sctx->fallback.blk;
+	skcipher_request_set_tfm(req, sctx->fallback.blk);
+	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
-	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
-
-	desc->tfm = tfm;
+	ret = crypto_skcipher_encrypt(req);
 	return ret;
 }
 
@@ -370,8 +373,9 @@ static int fallback_init_blk(struct crypto_tfm *tfm)
 	const char *name = tfm->__crt_alg->cra_name;
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
-	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
+						   CRYPTO_ALG_ASYNC |
+						   CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(sctx->fallback.blk)) {
 		pr_err("Allocating AES fallback algorithm %s failed\n",
@@ -386,8 +390,7 @@ static void fallback_exit_blk(struct crypto_tfm *tfm)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_blkcipher(sctx->fallback.blk);
-	sctx->fallback.blk = NULL;
+	crypto_free_skcipher(sctx->fallback.blk);
 }
 
 static struct crypto_alg ecb_aes_alg = {
@@ -536,16 +539,16 @@ static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 	unsigned int ret;
 
-	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
-	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
-			CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
+				  CRYPTO_TFM_REQ_MASK);
 
-	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
-	if (ret) {
-		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
-				CRYPTO_TFM_RES_MASK);
-	}
+	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);
+
+	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
+			  CRYPTO_TFM_RES_MASK;
+
 	return ret;
 }
 
@@ -553,16 +556,18 @@ static int xts_fallback_decrypt(struct blkcipher_desc *desc,
 		struct scatterlist *dst, struct scatterlist *src,
 		unsigned int nbytes)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct crypto_blkcipher *tfm;
+	struct crypto_blkcipher *tfm = desc->tfm;
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
+	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
 	unsigned int ret;
 
-	tfm = desc->tfm;
-	desc->tfm = xts_ctx->fallback;
+	skcipher_request_set_tfm(req, xts_ctx->fallback);
+	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
-	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+	ret = crypto_skcipher_decrypt(req);
 
-	desc->tfm = tfm;
+	skcipher_request_zero(req);
 	return ret;
 }
 
@@ -570,16 +575,18 @@ static int xts_fallback_encrypt(struct blkcipher_desc *desc,
 		struct scatterlist *dst, struct scatterlist *src,
 		unsigned int nbytes)
 {
-	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
-	struct crypto_blkcipher *tfm;
+	struct crypto_blkcipher *tfm = desc->tfm;
+	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
+	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
 	unsigned int ret;
 
-	tfm = desc->tfm;
-	desc->tfm = xts_ctx->fallback;
+	skcipher_request_set_tfm(req, xts_ctx->fallback);
+	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
+	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
-	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+	ret = crypto_skcipher_encrypt(req);
 
-	desc->tfm = tfm;
+	skcipher_request_zero(req);
 	return ret;
 }
 
@@ -700,8 +707,9 @@ static int xts_fallback_init(struct crypto_tfm *tfm)
 	const char *name = tfm->__crt_alg->cra_name;
 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 
-	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
-			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
+						  CRYPTO_ALG_ASYNC |
+						  CRYPTO_ALG_NEED_FALLBACK);
 
 	if (IS_ERR(xts_ctx->fallback)) {
 		pr_err("Allocating XTS fallback algorithm %s failed\n",
@@ -715,8 +723,7 @@ static void xts_fallback_exit(struct crypto_tfm *tfm)
 {
 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 
-	crypto_free_blkcipher(xts_ctx->fallback);
-	xts_ctx->fallback = NULL;
+	crypto_free_skcipher(xts_ctx->fallback);
 }
 
 static struct crypto_alg xts_aes_alg = {
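
As a companion to the sketch before the diff (again illustrative only, with invented
names and the same headers assumed), this is the key-setting side of the pattern: the
caller's request flags are pushed down to the fallback before crypto_skcipher_setkey(),
and the fallback's result flags are copied back unconditionally, which is the small
behavioural change the two setkey hunks also make (the old code copied the result flags
back only on error).

/* Hedged sketch of setkey flag propagation, mirroring setkey_fallback_blk(). */
static int my_fallback_setkey(struct crypto_tfm *tfm,
                              struct crypto_skcipher *fallback,
                              const u8 *key, unsigned int len)
{
        int ret;

        /* Hand the caller's request flags (CRYPTO_TFM_REQ_*) to the fallback. */
        crypto_skcipher_clear_flags(fallback, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(fallback, tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

        ret = crypto_skcipher_setkey(fallback, key, len);

        /* Reflect the fallback's result flags (CRYPTO_TFM_RES_*) back to the tfm. */
        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |= crypto_skcipher_get_flags(fallback) & CRYPTO_TFM_RES_MASK;

        return ret;
}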