path: root/arch/arm/crypto/blake2b-neon-glue.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * BLAKE2b digest algorithm, NEON accelerated
 *
 * Copyright 2020 Google LLC
 */

#include <crypto/internal/blake2b.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>

#include <linux/module.h>
#include <linux/sizes.h>

#include <asm/neon.h>
#include <asm/simd.h>

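/*
 * NEON implementation of the BLAKE2b compression function, defined in
 * assembly in blake2b-neon-core.S.
 */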
asmlinkage void blake2b_compress_neon(struct blake2b_state *state,
				      const u8 *block, size_t nblocks, u32 inc);

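/*
 * Compress 'nblocks' message blocks.  Use the NEON implementation when NEON
 * is usable in the current context; otherwise fall back to the generic code.
 */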
static void blake2b_compress_arch(struct blake2b_state *state,
				  const u8 *block, size_t nblocks, u32 inc)
{
	if (!crypto_simd_usable()) {
		blake2b_compress_generic(state, block, nblocks, inc);
		return;
	}

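	/*
	 * Limit each NEON section to at most SZ_4K bytes of input so that
	 * preemption, which kernel_neon_begin() disables, isn't held off
	 * for too long.
	 */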
	do {
		const size_t blocks = min_t(size_t, nblocks,
					    SZ_4K / BLAKE2B_BLOCK_SIZE);

		kernel_neon_begin();
		blake2b_compress_neon(state, block, blocks, inc);
		kernel_neon_end();

		nblocks -= blocks;
		block += blocks * BLAKE2B_BLOCK_SIZE;
	} while (nblocks);
}

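/* shash ->update() and ->final() hooks that plug in the NEON compression function */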
static int crypto_blake2b_update_neon(struct shash_desc *desc,
				      const u8 *in, unsigned int inlen)
{
	return crypto_blake2b_update(desc, in, inlen, blake2b_compress_arch);
}

static int crypto_blake2b_final_neon(struct shash_desc *desc, u8 *out)
{
	return crypto_blake2b_final(desc, out, blake2b_compress_arch);
}

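/*
 * Template for one shash_alg per digest size.  The priority of 200 makes these
 * preferred over the lower-priority generic implementation, and
 * CRYPTO_ALG_OPTIONAL_KEY reflects that BLAKE2b may also be used unkeyed.
 */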
#define BLAKE2B_ALG(name, driver_name, digest_size)			\
	{								\
		.base.cra_name		= name,				\
		.base.cra_driver_name	= driver_name,			\
		.base.cra_priority	= 200,				\
		.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,	\
		.base.cra_blocksize	= BLAKE2B_BLOCK_SIZE,		\
		.base.cra_ctxsize	= sizeof(struct blake2b_tfm_ctx), \
		.base.cra_module	= THIS_MODULE,			\
		.digestsize		= digest_size,			\
		.setkey			= crypto_blake2b_setkey,	\
		.init			= crypto_blake2b_init,		\
		.update			= crypto_blake2b_update_neon,	\
		.final			= crypto_blake2b_final_neon,	\
		.descsize		= sizeof(struct blake2b_state),	\
	}

static struct shash_alg blake2b_neon_algs[] = {
	BLAKE2B_ALG("blake2b-160", "blake2b-160-neon", BLAKE2B_160_HASH_SIZE),
	BLAKE2B_ALG("blake2b-256", "blake2b-256-neon", BLAKE2B_256_HASH_SIZE),
	BLAKE2B_ALG("blake2b-384", "blake2b-384-neon", BLAKE2B_384_HASH_SIZE),
	BLAKE2B_ALG("blake2b-512", "blake2b-512-neon", BLAKE2B_512_HASH_SIZE),
};

static int __init blake2b_neon_mod_init(void)
{
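	/* Only register the algorithms if the CPU actually has NEON. */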
	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	return crypto_register_shashes(blake2b_neon_algs,
				       ARRAY_SIZE(blake2b_neon_algs));
}

static void __exit blake2b_neon_mod_exit(void)
{
	crypto_unregister_shashes(blake2b_neon_algs,
				  ARRAY_SIZE(blake2b_neon_algs));
}

module_init(blake2b_neon_mod_init);
module_exit(blake2b_neon_mod_exit);

MODULE_DESCRIPTION("BLAKE2b digest algorithm, NEON accelerated");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("blake2b-160");
MODULE_ALIAS_CRYPTO("blake2b-160-neon");
MODULE_ALIAS_CRYPTO("blake2b-256");
MODULE_ALIAS_CRYPTO("blake2b-256-neon");
MODULE_ALIAS_CRYPTO("blake2b-384");
MODULE_ALIAS_CRYPTO("blake2b-384-neon");
MODULE_ALIAS_CRYPTO("blake2b-512");
MODULE_ALIAS_CRYPTO("blake2b-512-neon");