mirror of https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git
synced 2024-10-29 23:53:32 +00:00
c4741b2305
Use subsys_initcall for registration of all templates and generic algorithm implementations, rather than module_init. Then change cryptomgr to use arch_initcall, to place it before the subsys_initcalls.

This is needed so that when both a generic and optimized implementation of an algorithm are built into the kernel (not loadable modules), the generic implementation is registered before the optimized one. Otherwise, the self-tests for the optimized implementation are unable to allocate the generic implementation for the new comparison fuzz tests.

Note that on arm, a side effect of this change is that self-tests for generic implementations may run before the unaligned access handler has been installed. So, unaligned accesses will crash the kernel. This is arguably a good thing as it makes it easier to detect that type of bug.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
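For context, a minimal sketch of the initcall ordering this message relies on; generic_alg and generic_impl_init() are hypothetical names introduced here for illustration, while crypto_register_alg() and the initcall macros are real kernel APIs:

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>

static struct crypto_alg generic_alg;	/* hypothetical generic implementation */

/*
 * Built-in initcalls run strictly in level order: arch_initcall (level 3)
 * runs before subsys_initcall (level 4), which runs before the device
 * level (level 6) that module_init() maps to for built-in code.  So this
 * registration runs after cryptomgr (arch_initcall) but before any
 * optimized driver still registering via module_init().
 */
static int __init generic_impl_init(void)
{
	return crypto_register_alg(&generic_alg);
}
subsys_initcall(generic_impl_init);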
199 lines
4.9 KiB
C
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>

static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		/* PCBC: C[i] = E(P[i] ^ P[i-1] ^ C[i-1]); iv carries the
		 * running P ^ C chaining value, seeded with the IV.
		 */
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

	do {
		/* src is encrypted in place, so stash the plaintext block
		 * first; it is still needed for the next chaining value.
		 */
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		/* PCBC: P[i] = D(C[i]) ^ P[i-1] ^ C[i-1]; iv again carries
		 * the running P ^ C chaining value.
		 */
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

	do {
		/* src is decrypted in place, so stash the ciphertext block
		 * first; it is still needed for the next chaining value.
		 */
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

subsys_initcall(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("pcbc");
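Usage note: the template registered above is instantiated by name through the skcipher API. A minimal sketch, assuming AES as the underlying cipher; pcbc_demo() is a hypothetical caller introduced here, while crypto_alloc_skcipher() and friends are the real API:

#include <crypto/skcipher.h>
#include <linux/err.h>

static int pcbc_demo(void)
{
	struct crypto_skcipher *tfm;

	/* cryptomgr resolves "pcbc(aes)" by instantiating the "pcbc"
	 * template around the "aes" single-block cipher.
	 */
	tfm = crypto_alloc_skcipher("pcbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... crypto_skcipher_setkey(), skcipher_request_alloc(), etc. ... */

	crypto_free_skcipher(tfm);
	return 0;
}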