1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * SHA-256 optimized for ARM
4  *
5  * Copyright 2025 Google LLC
6  */
7 #include <asm/neon.h>
8 #include <crypto/internal/sha2.h>
9 #include <linux/kernel.h>
10 #include <linux/module.h>
11 
/*
 * Assembly implementations. All three process whole 64-byte blocks:
 *  - sha256_blocks_arch: scalar ARM code, usable on any CPU.
 *  - sha256_block_data_order_neon: NEON vectorized code.
 *  - sha256_ce_transform: ARMv8 Crypto Extensions (SHA-2 instructions).
 */
asmlinkage void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
				   const u8 *data, size_t nblocks);
EXPORT_SYMBOL_GPL(sha256_blocks_arch);
asmlinkage void sha256_block_data_order_neon(u32 state[SHA256_STATE_WORDS],
					     const u8 *data, size_t nblocks);
asmlinkage void sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
				    const u8 *data, size_t nblocks);

/*
 * CPU feature flags, resolved once at init time (see sha256_arm_mod_init)
 * and read-only afterwards; static keys make the hot-path checks free.
 */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
22 
23 void sha256_blocks_simd(u32 state[SHA256_STATE_WORDS],
24 			const u8 *data, size_t nblocks)
25 {
26 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
27 	    static_branch_likely(&have_neon)) {
28 		kernel_neon_begin();
29 		if (static_branch_likely(&have_ce))
30 			sha256_ce_transform(state, data, nblocks);
31 		else
32 			sha256_block_data_order_neon(state, data, nblocks);
33 		kernel_neon_end();
34 	} else {
35 		sha256_blocks_arch(state, data, nblocks);
36 	}
37 }
38 EXPORT_SYMBOL_GPL(sha256_blocks_simd);
39 
40 bool sha256_is_arch_optimized(void)
41 {
42 	/* We always can use at least the ARM scalar implementation. */
43 	return true;
44 }
45 EXPORT_SYMBOL_GPL(sha256_is_arch_optimized);
46 
47 static int __init sha256_arm_mod_init(void)
48 {
49 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
50 		static_branch_enable(&have_neon);
51 		if (elf_hwcap2 & HWCAP2_SHA2)
52 			static_branch_enable(&have_ce);
53 	}
54 	return 0;
55 }
56 subsys_initcall(sha256_arm_mod_init);
57 
/*
 * Nothing to tear down: the static keys are harmless if left enabled and
 * the module owns no dynamic resources. The empty exit handler exists so
 * the module can be unloaded.
 */
static void __exit sha256_arm_mod_exit(void)
{
}
module_exit(sha256_arm_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-256 optimized for ARM");
65