// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/uaccess.h>

#include "aesp8-ppc.h"

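/*
 * Per-tfm context: the three expanded VMX key schedules (data
 * encryption, data decryption, and tweak encryption) plus a software
 * xts(aes) fallback for requests the vector path cannot handle.
 */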
struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

static int p8_aes_xts_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

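	/*
	 * The fallback handles requests issued from contexts where the
	 * vector unit is unavailable, and requests whose length is not a
	 * multiple of the AES block size (ciphertext stealing).
	 */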
	fallback = crypto_alloc_skcipher("xts(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate xts(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

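	/* Reserve room in the request context for a nested fallback request. */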
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, key, keylen);
	if (ret)
		return ret;

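	/*
	 * XTS supplies two keys of equal size: the first half is the data
	 * key, the second half the tweak key.  The VMX key-expansion
	 * helpers touch vector registers, so they must run with VSX
	 * enabled and with preemption and page faults disabled.
	 */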
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

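	/* Keep the fallback tfm keyed identically so both paths agree. */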
	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	u8 tweak[AES_BLOCK_SIZE];
	int ret;

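	/* XTS is undefined for inputs shorter than one AES block. */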
	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

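	/*
	 * Defer to the fallback when the vector unit is unusable in this
	 * context, or when the length is not block-aligned and ciphertext
	 * stealing is required.
	 */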
	if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

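	/* Compute the initial tweak by encrypting the IV with the tweak key. */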
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();

	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);

	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

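	/*
	 * Process the data one walk step at a time.  Each step handles
	 * whole blocks only; the assembly advances the tweak in place
	 * across steps, and any partial tail is handed back to the walker.
	 */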
	while ((nbytes = walk.nbytes) != 0) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		if (enc)
			aes_p8_xts_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->enc_key, NULL, tweak);
		else
			aes_p8_xts_decrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->dec_key, NULL, tweak);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	return ret;
}

static int p8_aes_xts_encrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 1);
}

static int p8_aes_xts_decrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 0);
}

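/*
 * Priority 2000 places this driver well above the generic xts(aes)
 * template; CRYPTO_ALG_NEED_FALLBACK marks it as requiring a software
 * fallback and keeps it from being selected as a fallback itself.
 */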
struct skcipher_alg p8_aes_xts_alg = {
	.base.cra_name = "xts(aes)",
	.base.cra_driver_name = "p8_aes_xts",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.setkey = p8_aes_xts_setkey,
	.encrypt = p8_aes_xts_encrypt,
	.decrypt = p8_aes_xts_decrypt,
	.init = p8_aes_xts_init,
	.exit = p8_aes_xts_exit,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
};