xref: /qemu/host/include/aarch64/host/crypto/clmul.h (revision 055c99015a4ec3c608d0260592368adc604429ea)
/*
 * AArch64 specific clmul acceleration.
 * SPDX-License-Identifier: GPL-2.0-or-later
 */

#ifndef AARCH64_HOST_CRYPTO_CLMUL_H
#define AARCH64_HOST_CRYPTO_CLMUL_H

#include "host/cpuinfo.h"
#include <arm_neon.h>

/*
 * 64x64->128 pmull is available with FEAT_PMULL.
 * Both FEAT_AES and FEAT_PMULL are advertised by the same compiler
 * macro, __ARM_FEATURE_AES.
 */
#ifdef __ARM_FEATURE_AES
# define HAVE_CLMUL_ACCEL  true
#else
# define HAVE_CLMUL_ACCEL  likely(cpuinfo & CPUINFO_PMULL)
#endif
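/*
 * When the compile-time target does not guarantee FEAT_AES but the
 * compiler provides the crypto intrinsics, compile just the accelerated
 * function below with +crypto; availability is still gated at runtime
 * via HAVE_CLMUL_ACCEL.
 */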
#if !defined(__ARM_FEATURE_AES) && defined(CONFIG_ARM_AES_BUILTIN)
# define ATTR_CLMUL_ACCEL  __attribute__((target("+crypto")))
#else
# define ATTR_CLMUL_ACCEL
#endif

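/*
 * clmul_64_accel:
 * Perform a 64x64->128-bit carry-less multiply via PMULL.
 * The caller is expected to check HAVE_CLMUL_ACCEL before using this.
 */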
static inline Int128 ATTR_CLMUL_ACCEL
clmul_64_accel(uint64_t n, uint64_t m)
{
    union { poly128_t v; Int128 s; } u;

#ifdef CONFIG_ARM_AES_BUILTIN
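    /* The compiler provides the intrinsic; let it emit pmull itself. */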
    u.v = vmull_p64((poly64_t)n, (poly64_t)m);
#else
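    /*
     * No intrinsic available: emit pmull directly.  The .arch_extension
     * directive makes the assembler accept the instruction without
     * changing the compiler's own target.
     */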
    asm(".arch_extension aes\n\t"
        "pmull %0.1q, %1.1d, %2.1d" : "=w"(u.v) : "w"(n), "w"(m));
#endif
    return u.s;
}

#endif /* AARCH64_HOST_CRYPTO_CLMUL_H */