xref: /linux/lib/crypto/arm64/aes-ce.S (revision 370c3883195566ee3e7d79e0146c3d735a406573)
1*4b908403SEric Biggers/* SPDX-License-Identifier: GPL-2.0-only */
2*4b908403SEric Biggers/*
3*4b908403SEric Biggers * AES cipher for ARMv8 with Crypto Extensions
4*4b908403SEric Biggers *
5*4b908403SEric Biggers * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
6*4b908403SEric Biggers */
7*4b908403SEric Biggers
8*4b908403SEric Biggers#include <linux/linkage.h>
9*4b908403SEric Biggers#include <asm/assembler.h>
10*4b908403SEric Biggers
/*
 * The shared mode code (aes-modes.S, included at the bottom of this file)
 * emits its functions through these hooks; prefixing with "ce_" gives this
 * Crypto Extensions build its own symbol names.
 */
#define AES_FUNC_START(func)		SYM_FUNC_START(ce_ ## func)
#define AES_FUNC_END(func)		SYM_FUNC_END(ce_ ## func)
13*4b908403SEric Biggers
	.arch		armv8-a+crypto

	/*
	 * All three per-mode state registers alias v16; v17-v31 are reserved
	 * for the preloaded round keys (see load_round_keys below).
	 * NOTE(review): the aliases can share one register presumably because
	 * no mode uses more than one of them at a time -- confirm in
	 * aes-modes.S.
	 */
	xtsmask		.req	v16
	cbciv		.req	v16
	vctr		.req	v16
19*4b908403SEric Biggers
	/*
	 * Hooks invoked by the generic mode code; both are no-ops in this
	 * implementation.  NOTE(review): presumably the XTS mask in v16 is
	 * never clobbered here so it needs no reloading, and no tweak
	 * skipping is required for ciphertext stealing -- confirm against
	 * the call sites in aes-modes.S.
	 */
	.macro		xts_reload_mask, tmp
	.endm

	.macro		xts_cts_skip_tw, reg, lbl
	.endm
25*4b908403SEric Biggers
	/*
	 * Preload the entire expanded key schedule into v17-v31.
	 *
	 * \rk  - pointer to the key schedule (16 bytes per round key)
	 * \nr  - number of rounds: 10, 12 or 14 (sign-extended from a
	 *        32-bit register by the sxtw below)
	 * \tmp - scratch GPR
	 *
	 * Keys 0-3 always go into v17-v20.  The last eleven keys go into
	 * v21-v31: \tmp = \rk + 16*\nr - 160 = \rk + 16*(\nr - 10), so
	 * key[\nr - 10] lands in v21 and key[\nr] always lands in v31,
	 * whatever the key size.  do_block_Nx relies on this layout: v17-v20
	 * are used only for the extra 192/256-bit rounds, v21-v29 for the
	 * nine common rounds, and v30/v31 for the final round.  (For 10
	 * rounds, v17-v20 simply duplicate v21-v24 and go unused.)
	 */
	.macro		load_round_keys, rk, nr, tmp
	add		\tmp, \rk, \nr, sxtw #4	/* \tmp = \rk + \nr * 16 */
	sub		\tmp, \tmp, #160	/* back up to key[\nr - 10] */
	ld1		{v17.4s-v20.4s}, [\rk]
	ld1		{v21.4s-v24.4s}, [\tmp], #64
	ld1		{v25.4s-v28.4s}, [\tmp], #64
	ld1		{v29.4s-v31.4s}, [\tmp]
	.endm
35*4b908403SEric Biggers
	/*
	 * Prepare for encryption with the key schedule in rk[]: just preload
	 * all round keys into v17-v31.  Clobbers \temp and v17-v31.
	 */
	.macro		enc_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
40*4b908403SEric Biggers
	/*
	 * Prepare for encryption (again) but with a new key in rk[].
	 * Identical to enc_prepare here since the keys live entirely in
	 * v17-v31; kept as a separate hook for the aes-modes.S interface.
	 */
	.macro		enc_switch_key, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
45*4b908403SEric Biggers
	/*
	 * Prepare for decryption with the key schedule in rk[].  Same key
	 * preload as encryption; the direction is selected later by the
	 * aesd/aesimc instructions in do_block_Nx.  Clobbers \temp, v17-v31.
	 */
	.macro		dec_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm
	/*
	 * One full AES round on up to five blocks, all with the same round
	 * key \k.
	 *
	 * \de      - 'e' (aese) for encryption, 'd' (aesd) for decryption
	 * \mc      - 'mc' (aesmc) or 'imc' (aesimc), matching \de
	 * \i0-\i4  - block state registers; trailing arguments may be blank
	 *
	 * The .ifnb ladder supports 1, 2, 4 or 5 blocks: \i2 is only tested
	 * together with \i3, so callers never pass exactly three.  Each
	 * block's aese/aesmc (or aesd/aesimc) pair is kept adjacent.
	 */
	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	aes\mc		\i0\().16b, \i0\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	aes\mc		\i1\().16b, \i1\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\mc		\i2\().16b, \i2\().16b
	aes\de		\i3\().16b, \k\().16b
	aes\mc		\i3\().16b, \i3\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	aes\mc		\i4\().16b, \i4\().16b
	.endif
	.endif
	.endif
	.endm
69*4b908403SEric Biggers
	/*
	 * Up to 5 interleaved en-/decryption rounds with the same round key.
	 * \enc selects the direction: 'e' expands to aese/aesmc, anything
	 * else to aesd/aesimc.  The comparison is done at assembly time
	 * with .ifc (literal character match on the macro argument).
	 */
	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
	.ifc		\enc, e
	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
	.else
	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
	.endif
	.endm
78*4b908403SEric Biggers
	/*
	 * Up to 5 interleaved final rounds.  The last AES round has no
	 * MixColumns step, so it is just aese/aesd with the penultimate
	 * key \k (aese/aesd perform AddRoundKey first), followed by an
	 * explicit XOR with the final round key \k2.
	 *
	 * Block-count handling matches do_enc_Nx: the .ifnb ladders accept
	 * 1, 2, 4 or 5 blocks.
	 */
	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\de		\i3\().16b, \k\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	.endif
	.endif
	.endif
	eor		\i0\().16b, \i0\().16b, \k2\().16b
	.ifnb		\i1
	eor		\i1\().16b, \i1\().16b, \k2\().16b
	.ifnb		\i3
	eor		\i2\().16b, \i2\().16b, \k2\().16b
	eor		\i3\().16b, \i3\().16b, \k2\().16b
	.ifnb		\i4
	eor		\i4\().16b, \i4\().16b, \k2\().16b
	.endif
	.endif
	.endif
	.endm
104*4b908403SEric Biggers
	/*
	 * Complete AES en-/decryption of up to 5 interleaved blocks, using
	 * the round keys preloaded into v17-v31 by load_round_keys.
	 *
	 * \enc    - 'e' or 'd' (see round_Nx)
	 * \rounds - GPR holding 10, 12 or 14; tested by bit:
	 *           10 (0b1010, bit 2 clear) skips v17-v20 entirely,
	 *           12 (0b1100, bit 1 clear) runs v17/v18 only,
	 *           14 (0b1110) runs all of v17-v20.
	 * Then all key sizes share the nine rounds on v21-v29 and the final
	 * round on v30 (penultimate key) / v31 (last key).
	 *
	 * .L\@ uses the assembler's \@ pseudo-variable, which counts macro
	 * expansions, to make the local label unique per invocation; both
	 * tbz instructions branch to the same label.
	 */
	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
	tbz		\rounds, #2, .L\@	/* 128 bits */
	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
	tbz		\rounds, #1, .L\@	/* 192 bits */
	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
.L\@:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
	.endr
	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
	.endm
118*4b908403SEric Biggers
	/*
	 * Encrypt a single block in \in.  \t0-\t2 are scratch-register
	 * arguments kept for the aes-modes.S interface; unused here since
	 * the round keys are already resident in v17-v31.
	 */
	.macro		encrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \in
	.endm
122*4b908403SEric Biggers
	/* Encrypt four blocks in parallel; \t0-\t2 unused (interface only) */
	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
	.endm
126*4b908403SEric Biggers
	/* Encrypt five blocks in parallel; \t0-\t2 unused (interface only) */
	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
130*4b908403SEric Biggers
	/* Decrypt a single block in \in; \t0-\t2 unused (interface only) */
	.macro		decrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \in
	.endm
134*4b908403SEric Biggers
	/* Decrypt four blocks in parallel; \t0-\t2 unused (interface only) */
	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
	.endm
138*4b908403SEric Biggers
	/* Decrypt five blocks in parallel; \t0-\t2 unused (interface only) */
	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
142*4b908403SEric Biggers
/*
 * Maximum interleave factor the mode code may use per iteration; matches
 * the 5-way encrypt_block5x/decrypt_block5x macros defined above.
 */
#define MAX_STRIDE	5

/* Pull in the shared mode implementations built on the macros above */
#include "aes-modes.S"
146