/* xref: /linux/lib/crypto/powerpc/gf128hash.h (revision 370c3883195566ee3e7d79e0146c3d735a406573) */
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * GHASH routines supporting VMX instructions on the Power 8
4  *
5  * Copyright (C) 2015, 2019 International Business Machines Inc.
6  * Copyright (C) 2014 - 2018 Linaro Ltd.
7  * Copyright 2026 Google LLC
8  */
9 
10 #include <asm/simd.h>
11 #include <asm/switch_to.h>
12 #include <linux/cpufeature.h>
13 #include <linux/jump_label.h>
14 #include <linux/preempt.h>
15 #include <linux/uaccess.h>
16 
/* Set once at init when the CPU supports the Power8 vector-crypto instructions. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_vec_crypto);

/*
 * VMX/VSX assembly routines.  As used below, each call is bracketed by
 * enable_kernel_vsx()/disable_kernel_vsx() with preemption and page faults
 * disabled.
 */
void gcm_init_p8(u64 htable[4][2], const u8 h[16]);
void gcm_gmult_p8(u8 Xi[16], const u64 htable[4][2]);
void gcm_ghash_p8(u8 Xi[16], const u64 htable[4][2], const u8 *in, size_t len);
#define ghash_preparekey_arch ghash_preparekey_arch
/*
 * Expand the raw 16-byte hash key into both forms kept in struct ghash_key:
 * key->h (POLYVAL form, used by the generic fallbacks) and key->htable (the
 * 4x2 u64 table consumed by the gcm_*_p8 assembly).  Both are always filled
 * in, so either code path may be taken at hash time.
 */
static void ghash_preparekey_arch(struct ghash_key *key,
				  const u8 raw_key[GHASH_BLOCK_SIZE])
{
	/* Generic-form key; also the source for the fallback table below. */
	ghash_key_to_polyval(raw_key, &key->h);

	if (static_branch_likely(&have_vec_crypto) && likely(may_use_simd())) {
		/*
		 * Using the VSX unit in kernel context requires preemption
		 * and page faults disabled around enable_kernel_vsx().
		 */
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		gcm_init_p8(key->htable, raw_key);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	} else {
		/* This reproduces gcm_init_p8() on both LE and BE systems. */
		key->htable[0][0] = 0;
		key->htable[0][1] = 0xc200000000000000;

		key->htable[1][0] = 0;
		key->htable[1][1] = le64_to_cpu(key->h.lo);

		key->htable[2][0] = le64_to_cpu(key->h.lo);
		key->htable[2][1] = le64_to_cpu(key->h.hi);

		key->htable[3][0] = le64_to_cpu(key->h.hi);
		key->htable[3][1] = 0;
	}
}
52 
53 #define ghash_mul_arch ghash_mul_arch
ghash_mul_arch(struct polyval_elem * acc,const struct ghash_key * key)54 static void ghash_mul_arch(struct polyval_elem *acc,
55 			   const struct ghash_key *key)
56 {
57 	if (static_branch_likely(&have_vec_crypto) && likely(may_use_simd())) {
58 		u8 ghash_acc[GHASH_BLOCK_SIZE];
59 
60 		polyval_acc_to_ghash(acc, ghash_acc);
61 
62 		preempt_disable();
63 		pagefault_disable();
64 		enable_kernel_vsx();
65 		gcm_gmult_p8(ghash_acc, key->htable);
66 		disable_kernel_vsx();
67 		pagefault_enable();
68 		preempt_enable();
69 
70 		ghash_acc_to_polyval(ghash_acc, acc);
71 		memzero_explicit(ghash_acc, sizeof(ghash_acc));
72 	} else {
73 		polyval_mul_generic(acc, &key->h);
74 	}
75 }
76 
77 #define ghash_blocks_arch ghash_blocks_arch
ghash_blocks_arch(struct polyval_elem * acc,const struct ghash_key * key,const u8 * data,size_t nblocks)78 static void ghash_blocks_arch(struct polyval_elem *acc,
79 			      const struct ghash_key *key,
80 			      const u8 *data, size_t nblocks)
81 {
82 	if (static_branch_likely(&have_vec_crypto) && likely(may_use_simd())) {
83 		u8 ghash_acc[GHASH_BLOCK_SIZE];
84 
85 		polyval_acc_to_ghash(acc, ghash_acc);
86 
87 		preempt_disable();
88 		pagefault_disable();
89 		enable_kernel_vsx();
90 		gcm_ghash_p8(ghash_acc, key->htable, data,
91 			     nblocks * GHASH_BLOCK_SIZE);
92 		disable_kernel_vsx();
93 		pagefault_enable();
94 		preempt_enable();
95 
96 		ghash_acc_to_polyval(ghash_acc, acc);
97 		memzero_explicit(ghash_acc, sizeof(ghash_acc));
98 	} else {
99 		ghash_blocks_generic(acc, &key->h, data, nblocks);
100 	}
101 }
102 
103 #define gf128hash_mod_init_arch gf128hash_mod_init_arch
gf128hash_mod_init_arch(void)104 static void gf128hash_mod_init_arch(void)
105 {
106 	if (cpu_has_feature(CPU_FTR_ARCH_207S) &&
107 	    (cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
108 		static_branch_enable(&have_vec_crypto);
109 }
110