crypto: sparc/crc32c - stop using the shash alignmask

As far as I can tell, "crc32c-sparc64" is the only "shash" algorithm in
the kernel that sets a nonzero alignmask and actually relies on it to
get the crypto API to align the inputs and outputs.  This capability is
not really useful, though.  To unblock removing alignmask support from
shash_alg, this patch updates crc32c-sparc64 to no longer
use the alignmask.  This means doing 8-byte alignment of the data when
doing an update, using get_unaligned_le32() when setting a non-default
initial CRC, and using put_unaligned_le32() to output the final CRC.
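
In a nutshell, the reason this removes the need for the alignmask is that
the old pointer casts assume the crypto API has already aligned the
buffers, while the <asm/unaligned.h> accessors accept any address.  A
minimal sketch with made-up helper names (illustrative only, not code
from this patch):

    /* Hypothetical helpers for illustration -- not part of the patch. */
    #include <asm/unaligned.h>

    static u32 load_key_le32(const u8 *key)
    {
            /* Old style: *mctx = le32_to_cpup((__le32 *)key);
             * assumes the API aligned @key via cra_alignmask. */
            return get_unaligned_le32(key); /* any alignment of @key */
    }

    static void store_final_crc(u32 crc, u8 *out)
    {
            /* Old style: *(__le32 *)out = ~cpu_to_le32(crc);
             * likewise relied on an aligned @out. */
            put_unaligned_le32(~crc, out);  /* any alignment of @out */
    }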

Partially tested with:

    export ARCH=sparc64 CROSS_COMPILE=sparc64-linux-gnu-
    make sparc64_defconfig
    echo CONFIG_CRYPTO_CRC32C_SPARC64=y >> .config
    echo '# CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is not set' >> .config
    echo CONFIG_DEBUG_KERNEL=y >> .config
    echo CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y >> .config
    make olddefconfig
    make -j$(getconf _NPROCESSORS_ONLN)
    qemu-system-sparc64 -kernel arch/sparc/boot/image -nographic

However, qemu doesn't actually support the sparc CRC32C instructions, so
for the test I temporarily replaced crc32c_sparc64() with __crc32c_le()
and made sparc64_has_crc32c_opcode() always return true.  So essentially
I tested the glue code, not the actual SPARC part, which is unchanged.
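
Roughly, that temporary hack amounted to the following (a sketch of what
is described above, not part of this commit; the extern declaration of
crc32c_sparc64() would be replaced by the C stub):

    /* Temporary qemu-testing hack only -- never intended to be merged. */

    /* Stand-in for the SPARC assembly routine: use the generic C CRC32C. */
    static void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len)
    {
            *crcp = __crc32c_le(*crcp, (const u8 *)data, len);
    }

    /* Pretend the CPU advertises the CRC32C opcode so the driver registers. */
    static bool __init sparc64_has_crc32c_opcode(void)
    {
            return true;
    }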

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author: Eric Biggers, 2023-10-18 22:53:27 -07:00; committed by Herbert Xu
parent 08debaa5cb
commit 9924003807
1 file changed, 23 insertions(+), 20 deletions(-)


@@ -20,6 +20,7 @@
 #include <asm/pstate.h>
 #include <asm/elf.h>
+#include <asm/unaligned.h>
 #include "opcodes.h"
@@ -35,7 +36,7 @@ static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
 	if (keylen != sizeof(u32))
 		return -EINVAL;
-	*mctx = le32_to_cpup((__le32 *)key);
+	*mctx = get_unaligned_le32(key);
 	return 0;
 }
@@ -51,18 +52,26 @@ static int crc32c_sparc64_init(struct shash_desc *desc)
 
 extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);
 
-static void crc32c_compute(u32 *crcp, const u64 *data, unsigned int len)
+static u32 crc32c_compute(u32 crc, const u8 *data, unsigned int len)
 {
-	unsigned int asm_len;
+	unsigned int n = -(uintptr_t)data & 7;
 
-	asm_len = len & ~7U;
-	if (asm_len) {
-		crc32c_sparc64(crcp, data, asm_len);
-		data += asm_len / 8;
-		len -= asm_len;
+	if (n) {
+		/* Data isn't 8-byte aligned. Align it. */
+		n = min(n, len);
+		crc = __crc32c_le(crc, data, n);
+		data += n;
+		len -= n;
+	}
+	n = len & ~7U;
+	if (n) {
+		crc32c_sparc64(&crc, (const u64 *)data, n);
+		data += n;
+		len -= n;
 	}
 	if (len)
-		*crcp = __crc32c_le(*crcp, (const unsigned char *) data, len);
+		crc = __crc32c_le(crc, data, len);
+	return crc;
 }
 
 static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
@@ -70,19 +79,14 @@ static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
 {
 	u32 *crcp = shash_desc_ctx(desc);
 
-	crc32c_compute(crcp, (const u64 *) data, len);
-
+	*crcp = crc32c_compute(*crcp, data, len);
 	return 0;
 }
 
-static int __crc32c_sparc64_finup(u32 *crcp, const u8 *data, unsigned int len,
-				  u8 *out)
+static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,
+				  unsigned int len, u8 *out)
 {
-	u32 tmp = *crcp;
-
-	crc32c_compute(&tmp, (const u64 *) data, len);
-
-	*(__le32 *) out = ~cpu_to_le32(tmp);
-
+	put_unaligned_le32(~crc32c_compute(*crcp, data, len), out);
 	return 0;
 }
@@ -96,7 +100,7 @@ static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
 {
 	u32 *crcp = shash_desc_ctx(desc);
 
-	*(__le32 *) out = ~cpu_to_le32p(crcp);
+	put_unaligned_le32(~*crcp, out);
 	return 0;
 }
@@ -135,7 +139,6 @@ static struct shash_alg alg = {
 		.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
 		.cra_blocksize = CHKSUM_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(u32),
-		.cra_alignmask = 7,
 		.cra_module = THIS_MODULE,
 		.cra_init = crc32c_sparc64_cra_init,
 	}