// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright IBM Corp. 2024
 *
 * s390 specific HMAC support.
 */

#define KMSG_COMPONENT	"hmac_s390"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <asm/cpacf.h>
#include <crypto/internal/hash.h>
#include <crypto/hmac.h>
#include <crypto/sha2.h>
#include <linux/cpufeature.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

/*
 * KMAC param block layout for sha2 function codes:
 * The layout of the param block for the KMAC instruction depends on the
 * blocksize of the used hashing sha2-algorithm function codes. The param block
 * contains the hash chaining value (cv), the input message bit-length (imbl)
 * and the hmac-secret (key). To prevent code duplication, the sizes of all
 * these are calculated based on the blocksize.
 *
 * param-block:
 * +-------+
 * | cv    |
 * +-------+
 * | imbl  |
 * +-------+
 * | key   |
 * +-------+
 *
 * sizes:
 *  part | sha2-alg | calculation | size | type
 * ------+----------+-------------+------+---------
 *  cv   | 224/256  | blocksize/2 |  32  | u32[8]
 *       | 384/512  |             |  64  | u64[8]
 *  imbl | 224/256  | blocksize/8 |   8  | u64
 *       | 384/512  |             |  16  | u128
 *  key  | 224/256  | blocksize   |  64  | u8[64]
 *       | 384/512  |             | 128  | u8[128]
 */

#define MAX_DIGEST_SIZE		SHA512_DIGEST_SIZE
#define MAX_IMBL_SIZE		sizeof(u128)
#define MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE

#define SHA2_CV_SIZE(bs)	((bs) >> 1)
#define SHA2_IMBL_SIZE(bs)	((bs) >> 3)

#define SHA2_IMBL_OFFSET(bs)	(SHA2_CV_SIZE(bs))
#define SHA2_KEY_OFFSET(bs)	(SHA2_CV_SIZE(bs) + SHA2_IMBL_SIZE(bs))
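
/*
 * Illustrative compile-time checks of the layout above (a minimal sketch
 * added for clarity, not something the driver itself needs): for
 * hmac(sha224/256) the 64-byte block size puts the key at offset
 * 32 + 8 = 40, for hmac(sha384/512) the 128-byte block size puts it at
 * offset 64 + 16 = 80, and the largest param block is 80 + 128 = 208
 * bytes. static_assert() comes from <linux/build_bug.h>, which is pulled
 * in via <linux/kernel.h>.
 */
static_assert(SHA2_KEY_OFFSET(SHA256_BLOCK_SIZE) == 40);
static_assert(SHA2_KEY_OFFSET(SHA512_BLOCK_SIZE) == 80);
static_assert(SHA2_KEY_OFFSET(SHA512_BLOCK_SIZE) + SHA512_BLOCK_SIZE ==
              MAX_DIGEST_SIZE + MAX_IMBL_SIZE + MAX_BLOCK_SIZE);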

struct s390_hmac_ctx {
        u8 key[MAX_BLOCK_SIZE];
};

union s390_kmac_gr0 {
        unsigned long reg;
        struct {
                unsigned long		: 48;
                unsigned long ikp	: 1;
                unsigned long iimp	: 1;
                unsigned long ccup	: 1;
                unsigned long		: 6;
                unsigned long fc	: 7;
        };
};

struct s390_kmac_sha2_ctx {
        u8 param[MAX_DIGEST_SIZE + MAX_IMBL_SIZE + MAX_BLOCK_SIZE];
        union s390_kmac_gr0 gr0;
        u64 buflen[2];
};

/*
 * kmac_sha2_set_imbl - sets the input message bit-length based on the blocksize
 */
static inline void kmac_sha2_set_imbl(u8 *param, u64 buflen_lo,
                                      u64 buflen_hi, unsigned int blocksize)
{
        u8 *imbl = param + SHA2_IMBL_OFFSET(blocksize);

        switch (blocksize) {
        case SHA256_BLOCK_SIZE:
                *(u64 *)imbl = buflen_lo * BITS_PER_BYTE;
                break;
        case SHA512_BLOCK_SIZE:
                *(u128 *)imbl = (((u128)buflen_hi << 64) + buflen_lo) << 3;
                break;
        default:
                break;
        }
}

static int hash_data(const u8 *in, unsigned int inlen,
                     u8 *digest, unsigned int digestsize, bool final)
{
        unsigned long func;
        union {
                struct sha256_paramblock {
                        u32 h[8];
                        u64 mbl;
                } sha256;
                struct sha512_paramblock {
                        u64 h[8];
                        u128 mbl;
                } sha512;
        } __packed param;

#define PARAM_INIT(x, y, z)                     \
        param.sha##x.h[0] = SHA##y ## _H0;      \
        param.sha##x.h[1] = SHA##y ## _H1;      \
        param.sha##x.h[2] = SHA##y ## _H2;      \
        param.sha##x.h[3] = SHA##y ## _H3;      \
        param.sha##x.h[4] = SHA##y ## _H4;      \
        param.sha##x.h[5] = SHA##y ## _H5;      \
        param.sha##x.h[6] = SHA##y ## _H6;      \
        param.sha##x.h[7] = SHA##y ## _H7;      \
        param.sha##x.mbl = (z)

        switch (digestsize) {
        case SHA224_DIGEST_SIZE:
                func = final ? CPACF_KLMD_SHA_256 : CPACF_KIMD_SHA_256;
                PARAM_INIT(256, 224, inlen * 8);
                if (!final)
                        digestsize = SHA256_DIGEST_SIZE;
                break;
        case SHA256_DIGEST_SIZE:
                func = final ? CPACF_KLMD_SHA_256 : CPACF_KIMD_SHA_256;
                PARAM_INIT(256, 256, inlen * 8);
                break;
        case SHA384_DIGEST_SIZE:
                func = final ? CPACF_KLMD_SHA_512 : CPACF_KIMD_SHA_512;
                PARAM_INIT(512, 384, inlen * 8);
                if (!final)
                        digestsize = SHA512_DIGEST_SIZE;
                break;
        case SHA512_DIGEST_SIZE:
                func = final ? CPACF_KLMD_SHA_512 : CPACF_KIMD_SHA_512;
                PARAM_INIT(512, 512, inlen * 8);
                break;
        default:
                return -EINVAL;
        }

#undef PARAM_INIT

        cpacf_klmd(func, &param, in, inlen);

        memcpy(digest, &param, digestsize);

        return 0;
}

static int hash_key(const u8 *in, unsigned int inlen,
                    u8 *digest, unsigned int digestsize)
{
        return hash_data(in, inlen, digest, digestsize, true);
}

static int s390_hmac_sha2_setkey(struct crypto_shash *tfm,
                                 const u8 *key, unsigned int keylen)
{
        struct s390_hmac_ctx *tfm_ctx = crypto_shash_ctx(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        unsigned int bs = crypto_shash_blocksize(tfm);

        memset(tfm_ctx, 0, sizeof(*tfm_ctx));

        if (keylen > bs)
                return hash_key(key, keylen, tfm_ctx->key, ds);

        memcpy(tfm_ctx->key, key, keylen);
        return 0;
}

static int s390_hmac_sha2_init(struct shash_desc *desc)
{
        struct s390_hmac_ctx *tfm_ctx = crypto_shash_ctx(desc->tfm);
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int bs = crypto_shash_blocksize(desc->tfm);

        memcpy(ctx->param + SHA2_KEY_OFFSET(bs),
               tfm_ctx->key, bs);

        ctx->buflen[0] = 0;
        ctx->buflen[1] = 0;
        ctx->gr0.reg = 0;
        switch (crypto_shash_digestsize(desc->tfm)) {
        case SHA224_DIGEST_SIZE:
                ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_224;
                break;
        case SHA256_DIGEST_SIZE:
                ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_256;
                break;
        case SHA384_DIGEST_SIZE:
                ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_384;
                break;
        case SHA512_DIGEST_SIZE:
                ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_512;
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
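
/*
 * Update/finup split: the algorithms below are registered with
 * CRYPTO_AHASH_ALG_BLOCK_ONLY, so update() only feeds whole blocks to the
 * KMAC instruction (with the intermediate-message bit gr0.iimp set) and
 * returns the number of unprocessed tail bytes for the crypto layer to
 * buffer. finup() clears gr0.iimp, stores the total input message
 * bit-length in the param block and processes the remaining tail, after
 * which the chaining value in the param block is the finished HMAC.
 */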
static int s390_hmac_sha2_update(struct shash_desc *desc,
                                 const u8 *data, unsigned int len)
{
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int bs = crypto_shash_blocksize(desc->tfm);
        unsigned int n = round_down(len, bs);

        ctx->buflen[0] += n;
        if (ctx->buflen[0] < n)
                ctx->buflen[1]++;

        /* process as many blocks as possible */
        ctx->gr0.iimp = 1;
        _cpacf_kmac(&ctx->gr0.reg, ctx->param, data, n);
        return len - n;
}

static int s390_hmac_sha2_finup(struct shash_desc *desc, const u8 *src,
                                unsigned int len, u8 *out)
{
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int bs = crypto_shash_blocksize(desc->tfm);

        ctx->buflen[0] += len;
        if (ctx->buflen[0] < len)
                ctx->buflen[1]++;

        ctx->gr0.iimp = 0;
        kmac_sha2_set_imbl(ctx->param, ctx->buflen[0], ctx->buflen[1], bs);
        _cpacf_kmac(&ctx->gr0.reg, ctx->param, src, len);
        memcpy(out, ctx->param, crypto_shash_digestsize(desc->tfm));

        return 0;
}

static int s390_hmac_sha2_digest(struct shash_desc *desc,
                                 const u8 *data, unsigned int len, u8 *out)
{
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int ds = crypto_shash_digestsize(desc->tfm);
        int rc;

        rc = s390_hmac_sha2_init(desc);
        if (rc)
                return rc;

        ctx->gr0.iimp = 0;
        kmac_sha2_set_imbl(ctx->param, len, 0,
                           crypto_shash_blocksize(desc->tfm));
        _cpacf_kmac(&ctx->gr0.reg, ctx->param, data, len);
        memcpy(out, ctx->param, ds);

        return 0;
}
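
/*
 * Exported state layout: the ds-byte partial hash (the KMAC chaining value,
 * or, if no block has been processed yet, the software hash of the
 * ipad-XORed key), followed by the processed byte count as one u64 for
 * sha224/sha256 or two u64s for sha384/sha512. One block size is added to
 * the count so that it covers the implicit ipad block as well; import()
 * subtracts that bias again and re-enables gr0.ikp when data had already
 * been processed.
 */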
static int s390_hmac_export_zero(struct shash_desc *desc, void *out)
{
        struct crypto_shash *tfm = desc->tfm;
        u8 ipad[SHA512_BLOCK_SIZE];
        struct s390_hmac_ctx *ctx;
        unsigned int bs;
        int err, i;

        ctx = crypto_shash_ctx(tfm);
        bs = crypto_shash_blocksize(tfm);
        for (i = 0; i < bs; i++)
                ipad[i] = ctx->key[i] ^ HMAC_IPAD_VALUE;

        err = hash_data(ipad, bs, out, crypto_shash_digestsize(tfm), false);
        memzero_explicit(ipad, sizeof(ipad));
        return err;
}

static int s390_hmac_export(struct shash_desc *desc, void *out)
{
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int bs = crypto_shash_blocksize(desc->tfm);
        unsigned int ds = bs / 2;
        u64 lo = ctx->buflen[0];
        union {
                u8 *u8;
                u64 *u64;
        } p = { .u8 = out };
        int err = 0;

        if (!ctx->gr0.ikp)
                err = s390_hmac_export_zero(desc, out);
        else
                memcpy(p.u8, ctx->param, ds);
        p.u8 += ds;
        lo += bs;
        put_unaligned(lo, p.u64++);
        if (ds == SHA512_DIGEST_SIZE)
                put_unaligned(ctx->buflen[1] + (lo < bs), p.u64);
        return err;
}

static int s390_hmac_import(struct shash_desc *desc, const void *in)
{
        struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
        unsigned int bs = crypto_shash_blocksize(desc->tfm);
        unsigned int ds = bs / 2;
        union {
                const u8 *u8;
                const u64 *u64;
        } p = { .u8 = in };
        u64 lo;
        int err;

        err = s390_hmac_sha2_init(desc);
        memcpy(ctx->param, p.u8, ds);
        p.u8 += ds;
        lo = get_unaligned(p.u64++);
        ctx->buflen[0] = lo - bs;
        if (ds == SHA512_DIGEST_SIZE)
                ctx->buflen[1] = get_unaligned(p.u64) - (lo < bs);
        if (ctx->buflen[0] | ctx->buflen[1])
                ctx->gr0.ikp = 1;
        return err;
}

#define S390_HMAC_SHA2_ALG(x, ss) {                                     \
        .fc = CPACF_KMAC_HMAC_SHA_##x,                                  \
        .alg = {                                                        \
                .init = s390_hmac_sha2_init,                            \
                .update = s390_hmac_sha2_update,                        \
                .finup = s390_hmac_sha2_finup,                          \
                .digest = s390_hmac_sha2_digest,                        \
                .setkey = s390_hmac_sha2_setkey,                        \
                .export = s390_hmac_export,                             \
                .import = s390_hmac_import,                             \
                .descsize = sizeof(struct s390_kmac_sha2_ctx),          \
                .halg = {                                               \
                        .statesize = ss,                                \
                        .digestsize = SHA##x##_DIGEST_SIZE,             \
                        .base = {                                       \
                                .cra_name = "hmac(sha" #x ")",          \
                                .cra_driver_name = "hmac_s390_sha" #x,  \
                                .cra_blocksize = SHA##x##_BLOCK_SIZE,   \
                                .cra_priority = 400,                    \
                                .cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | \
                                             CRYPTO_AHASH_ALG_FINUP_MAX, \
                                .cra_ctxsize = sizeof(struct s390_hmac_ctx), \
                                .cra_module = THIS_MODULE,              \
                        },                                              \
                },                                                      \
        },                                                              \
}

static struct s390_hmac_alg {
        bool registered;
        unsigned int fc;
        struct shash_alg alg;
} s390_hmac_algs[] = {
        S390_HMAC_SHA2_ALG(224, sizeof(struct crypto_sha256_state)),
        S390_HMAC_SHA2_ALG(256, sizeof(struct crypto_sha256_state)),
        S390_HMAC_SHA2_ALG(384, SHA512_STATE_SIZE),
        S390_HMAC_SHA2_ALG(512, SHA512_STATE_SIZE),
};

static __always_inline void _s390_hmac_algs_unregister(void)
{
        struct s390_hmac_alg *hmac;
        int i;

        for (i = ARRAY_SIZE(s390_hmac_algs) - 1; i >= 0; i--) {
                hmac = &s390_hmac_algs[i];
                if (!hmac->registered)
                        continue;
                crypto_unregister_shash(&hmac->alg);
        }
}

static int __init hmac_s390_init(void)
{
        struct s390_hmac_alg *hmac;
        int i, rc = -ENODEV;

        if (!cpacf_query_func(CPACF_KLMD, CPACF_KLMD_SHA_256))
                return -ENODEV;
        if (!cpacf_query_func(CPACF_KLMD, CPACF_KLMD_SHA_512))
                return -ENODEV;

        for (i = 0; i < ARRAY_SIZE(s390_hmac_algs); i++) {
                hmac = &s390_hmac_algs[i];
                if (!cpacf_query_func(CPACF_KMAC, hmac->fc))
                        continue;

                rc = crypto_register_shash(&hmac->alg);
                if (rc) {
                        pr_err("unable to register %s\n",
                               hmac->alg.halg.base.cra_name);
                        goto out;
                }
                hmac->registered = true;
                pr_debug("registered %s\n", hmac->alg.halg.base.cra_name);
        }
        return rc;
out:
        _s390_hmac_algs_unregister();
        return rc;
}

static void __exit hmac_s390_exit(void)
{
        _s390_hmac_algs_unregister();
}

module_cpu_feature_match(S390_CPU_FEATURE_MSA, hmac_s390_init);
module_exit(hmac_s390_exit);

MODULE_DESCRIPTION("S390 HMAC driver");
MODULE_LICENSE("GPL");
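
/*
 * Minimal usage sketch (illustrative only, not part of this driver): once
 * the module is loaded on a machine with the required CPACF facilities, a
 * caller can request "hmac(sha256)" through the regular shash API and will
 * typically get this implementation because of its cra_priority. The key,
 * keylen, msg and msglen identifiers below are placeholders.
 *
 *	struct crypto_shash *tfm;
 *	u8 mac[SHA256_DIGEST_SIZE];
 *	int ret;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	ret = crypto_shash_setkey(tfm, key, keylen);
 *	if (!ret)
 *		ret = crypto_shash_tfm_digest(tfm, msg, msglen, mac);
 *	crypto_free_shash(tfm);
 */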