mirror of
https://github.com/edk2-porting/linux-next.git
synced 2024-12-21 19:53:59 +08:00
791b4d5f73
This patch ensures that kernel.h and slab.h are included for the setkey_unaligned function. It also breaks a couple of long lines. Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
123 lines
3.3 KiB
C
123 lines
3.3 KiB
C
/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
|
|
|
|
#include <linux/kernel.h>
|
|
#include <linux/crypto.h>
|
|
#include <linux/errno.h>
|
|
#include <linux/slab.h>
|
|
#include <linux/string.h>
|
|
#include "internal.h"
|
|
|
|
static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
|
|
unsigned int keylen)
|
|
{
|
|
struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
|
|
unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
|
|
int ret;
|
|
u8 *buffer, *alignbuffer;
|
|
unsigned long absize;
|
|
|
|
absize = keylen + alignmask;
|
|
buffer = kmalloc(absize, GFP_ATOMIC);
|
|
if (!buffer)
|
|
return -ENOMEM;
|
|
|
|
alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
|
|
memcpy(alignbuffer, key, keylen);
|
|
ret = cia->cia_setkey(tfm, alignbuffer, keylen);
|
|
memset(alignbuffer, 0, keylen);
|
|
kfree(buffer);
|
|
return ret;
|
|
|
|
}
|
|
|
|
static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
|
|
{
|
|
struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
|
|
unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
|
|
|
|
tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
|
|
if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
|
|
tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
|
|
return -EINVAL;
|
|
}
|
|
|
|
if ((unsigned long)key & alignmask)
|
|
return setkey_unaligned(tfm, key, keylen);
|
|
|
|
return cia->cia_setkey(tfm, key, keylen);
|
|
}
|
|
|
|
static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
|
|
const u8 *),
|
|
struct crypto_tfm *tfm,
|
|
u8 *dst, const u8 *src)
|
|
{
|
|
unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
|
|
unsigned int size = crypto_tfm_alg_blocksize(tfm);
|
|
u8 buffer[size + alignmask];
|
|
u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
|
|
|
|
memcpy(tmp, src, size);
|
|
fn(tfm, tmp, tmp);
|
|
memcpy(dst, tmp, size);
|
|
}
|
|
|
|
/*
 * Single-block encrypt wrapper used when the tfm has an alignment
 * mask: falls back to the bounce-buffer path if either pointer is
 * misaligned, otherwise calls the algorithm directly.
 */
static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
	unsigned long mask = crypto_tfm_alg_alignmask(tfm);
	unsigned long addrs = (unsigned long)dst | (unsigned long)src;

	if (unlikely(addrs & mask)) {
		cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
		return;
	}

	cipher->cia_encrypt(tfm, dst, src);
}
|
|
|
|
/*
 * Single-block decrypt wrapper, mirroring cipher_encrypt_unaligned:
 * bounce-buffer path for misaligned pointers, direct call otherwise.
 */
static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
				     u8 *dst, const u8 *src)
{
	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
	unsigned long mask = crypto_tfm_alg_alignmask(tfm);
	unsigned long addrs = (unsigned long)dst | (unsigned long)src;

	if (unlikely(addrs & mask)) {
		cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
		return;
	}

	cipher->cia_decrypt(tfm, dst, src);
}
|
|
|
|
int crypto_init_cipher_ops(struct crypto_tfm *tfm)
|
|
{
|
|
struct cipher_tfm *ops = &tfm->crt_cipher;
|
|
struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
|
|
|
|
ops->cit_setkey = setkey;
|
|
ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
|
|
cipher_encrypt_unaligned : cipher->cia_encrypt;
|
|
ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
|
|
cipher_decrypt_unaligned : cipher->cia_decrypt;
|
|
|
|
return 0;
|
|
}
|
|
|
|
/*
 * Deliberately empty: cipher tfms allocate nothing in
 * crypto_init_cipher_ops, so there is no per-tfm state to tear down.
 */
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}
|