/* xref: /linux/lib/crypto/arm64/aes-ce.S (revision 4b908403209252e59ecad4c068bf967fa3f07525) */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * AES cipher for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
7
#include <linux/linkage.h>
#include <asm/assembler.h>

/*
 * Emit the AES functions under a "ce_" symbol prefix, so this Crypto
 * Extensions based implementation can coexist with other AES
 * implementations in the kernel.  The function bodies themselves come
 * from aes-modes.S, included at the end of this file.
 */
#define AES_FUNC_START(func)		SYM_FUNC_START(ce_ ## func)
#define AES_FUNC_END(func)		SYM_FUNC_END(ce_ ## func)

	.arch		armv8-a+crypto

	/*
	 * Register aliases for the shared mode code: the XTS tweak mask,
	 * the CBC IV and the CTR counter all live in v16.  The macros in
	 * this file never touch v16 - the preloaded round keys occupy
	 * v17-v31 only - so these values survive across block operations.
	 */
	xtsmask		.req	v16
	cbciv		.req	v16
	vctr		.req	v16
19
	/*
	 * Hook invoked where the XTS tweak mask might need to be
	 * reloaded into xtsmask (presumably by aes-modes.S - confirm
	 * against that file).  Intentionally empty here: the CE macros
	 * never clobber v16, so the mask never needs reloading.
	 */
	.macro		xts_reload_mask, tmp
	.endm
22
	/*
	 * NOTE(review): appears to be a hook for the XTS ciphertext
	 * stealing path (consumed by aes-modes.S - confirm there).
	 * No-op for the CE implementation: no branch to \lbl is needed.
	 */
	.macro		xts_cts_skip_tw, reg, lbl
	.endm
25
	/*
	 * Preload the entire expanded key schedule into v17-v31.
	 *
	 *   rk  - pointer to the round keys (16 bytes per round key)
	 *   nr  - number of rounds (10, 12 or 14), as a W register
	 *   tmp - scratch general purpose register
	 *
	 * v17-v20 always hold round keys 0-3, taken from the start of
	 * rk[].  The remaining registers are loaded from
	 * rk + 16 * (nr - 10), so that - independent of key size -
	 * v21-v29 hold the last nine full round keys and v30/v31 hold
	 * the final two keys.  (For nr == 10 keys 0-3 are loaded twice,
	 * into v17-v20 and v21-v24; do_block_Nx only consumes one copy.)
	 */
	.macro		load_round_keys, rk, nr, tmp
	add		\tmp, \rk, \nr, sxtw #4	/* tmp = rk + nr * 16 */
	sub		\tmp, \tmp, #160	/* tmp = rk + 16 * (nr - 10) */
	ld1		{v17.4s-v20.4s}, [\rk]
	ld1		{v21.4s-v24.4s}, [\tmp], #64
	ld1		{v25.4s-v28.4s}, [\tmp], #64
	ld1		{v29.4s-v31.4s}, [\tmp]
	.endm
35
	/*
	 * Prepare for encryption with the key in rk[]: just preload the
	 * whole schedule into v17-v31; the CE round instructions consume
	 * the round keys directly from those registers.
	 */
	.macro		enc_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
40
	/*
	 * Prepare for encryption (again) but with a new key in rk[].
	 * Identical to enc_prepare for the CE implementation, since the
	 * full schedule is simply reloaded from scratch.
	 */
	.macro		enc_switch_key, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
45
	/*
	 * Prepare for decryption with the key in rk[].  The load is the
	 * same as for encryption; rk[] is assumed to already contain the
	 * decryption key schedule (prepared elsewhere - the aesd/aesimc
	 * path below uses the keys as loaded, without transforming them).
	 */
	.macro		dec_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
50
	/*
	 * One AES round applied to up to five blocks with the same round
	 * key \k.
	 *
	 *   de    - 'e' or 'd': emits aese or aesd (which also performs
	 *           the AddRoundKey with \k)
	 *   mc    - 'mc' or 'imc': the matching (Inverse)MixColumns
	 *   i0-i4 - block state registers; trailing arguments may be
	 *           blank.  Note that \i2 is only processed when \i3 is
	 *           present: the 1x, 2x, 4x and 5x forms are the ones
	 *           actually generated.
	 *
	 * Each aese/aesmc (or aesd/aesimc) pair is kept adjacent per
	 * block; NOTE(review): presumably so cores that fuse these pairs
	 * can do so - confirm against the targeted microarchitectures.
	 */
	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	aes\mc		\i0\().16b, \i0\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	aes\mc		\i1\().16b, \i1\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\mc		\i2\().16b, \i2\().16b
	aes\de		\i3\().16b, \k\().16b
	aes\mc		\i3\().16b, \i3\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	aes\mc		\i4\().16b, \i4\().16b
	.endif
	.endif
	.endif
	.endm
69
	/*
	 * Up to 5 interleaved full rounds with the same round key.
	 * \enc selects the direction: 'e' emits aese/aesmc (encrypt),
	 * anything else emits aesd/aesimc (decrypt).
	 */
	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
	.ifc		\enc, e
	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
	.else
	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
	.endif
	.endm
78
	/*
	 * Up to 5 interleaved final rounds: aese/aesd with the
	 * penultimate round key \k (no MixColumns in the last round),
	 * followed by the final AddRoundKey as a plain XOR with \k2.
	 * Blank-argument handling mirrors do_enc_Nx: \i2 is only
	 * processed when \i3 is present.
	 */
	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\de		\i3\().16b, \k\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	.endif
	.endif
	.endif
	eor		\i0\().16b, \i0\().16b, \k2\().16b
	.ifnb		\i1
	eor		\i1\().16b, \i1\().16b, \k2\().16b
	.ifnb		\i3
	eor		\i2\().16b, \i2\().16b, \k2\().16b
	eor		\i3\().16b, \i3\().16b, \k2\().16b
	.ifnb		\i4
	eor		\i4\().16b, \i4\().16b, \k2\().16b
	.endif
	.endif
	.endif
	.endm
104
	/*
	 * Encrypt ('e') or decrypt ('d') up to 5 interleaved blocks.
	 *
	 * \rounds is a W register holding 10, 12 or 14; the key size is
	 * dispatched with bit tests instead of compares: bit 2 is clear
	 * only for 10 (0b1010, AES-128), so the v17/v18 rounds are
	 * skipped; bit 1 is clear for 12 (0b1100, AES-192), skipping the
	 * v19/v20 rounds as well.  All key sizes share the tail: nine
	 * full rounds with v21-v29, then the final round with v30/v31
	 * (see load_round_keys for the register layout).  .L\@ expands
	 * to a label unique to each macro invocation.
	 */
	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
	tbz		\rounds, #2, .L\@	/* 128 bits */
	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
	tbz		\rounds, #1, .L\@	/* 192 bits */
	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
.L\@:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
	.endr
	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
	.endm
118
	/*
	 * Encrypt one block in place.  Matches the generic interface
	 * consumed by aes-modes.S; the t0-t2 scratch registers are not
	 * needed by the CE implementation.
	 */
	.macro		encrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \in
	.endm
122
	/*
	 * Encrypt four blocks in place, interleaved.  t0-t2 are unused
	 * scratch parameters kept for interface compatibility with
	 * aes-modes.S.
	 */
	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
	.endm
126
	/*
	 * Encrypt five blocks in place, interleaved (used when
	 * MAX_STRIDE == 5).  t0-t2 are unused scratch parameters.
	 */
	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
130
	/*
	 * Decrypt one block in place.  t0-t2 are unused scratch
	 * parameters kept for interface compatibility with aes-modes.S.
	 */
	.macro		decrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \in
	.endm
134
	/*
	 * Decrypt four blocks in place, interleaved.  t0-t2 are unused
	 * scratch parameters.
	 */
	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
	.endm
138
	/*
	 * Decrypt five blocks in place, interleaved (used when
	 * MAX_STRIDE == 5).  t0-t2 are unused scratch parameters.
	 */
	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
142
/*
 * Advertise five-way interleaving to the shared mode code, matching the
 * *_block5x macros defined above.
 */
#define MAX_STRIDE	5

#include "aes-modes.S"
146