/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * linux/arch/arm64/crypto/aes-ce.S - AES cipher for ARMv8 with
 * Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

#define AES_FUNC_START(func)	SYM_FUNC_START(ce_ ## func)
#define AES_FUNC_END(func)	SYM_FUNC_END(ce_ ## func)

	.arch		armv8-a+crypto

	xtsmask		.req	v16
	cbciv		.req	v16
	vctr		.req	v16

	.macro		xts_reload_mask, tmp
	.endm

	.macro		xts_cts_skip_tw, reg, lbl
	.endm

	/* preload all round keys */
	.macro		load_round_keys, rk, nr, tmp
	add		\tmp, \rk, \nr, sxtw #4
	sub		\tmp, \tmp, #160
	ld1		{v17.4s-v20.4s}, [\rk]
	ld1		{v21.4s-v24.4s}, [\tmp], #64
	ld1		{v25.4s-v28.4s}, [\tmp], #64
	ld1		{v29.4s-v31.4s}, [\tmp]
	.endm

	/* prepare for encryption with key in rk[] */
	.macro		enc_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm

	/* prepare for encryption (again) but with new key in rk[] */
	.macro		enc_switch_key, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm

	/* prepare for decryption with key in rk[] */
	.macro		dec_prepare, rounds, rk, temp
	load_round_keys	\rk, \rounds, \temp
	.endm

	.macro		do_enc_Nx, de, mc, k, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	aes\mc		\i0\().16b, \i0\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	aes\mc		\i1\().16b, \i1\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\mc		\i2\().16b, \i2\().16b
	aes\de		\i3\().16b, \k\().16b
	aes\mc		\i3\().16b, \i3\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	aes\mc		\i4\().16b, \i4\().16b
	.endif
	.endif
	.endif
	.endm

	/* up to 5 interleaved encryption rounds with the same round key */
	.macro		round_Nx, enc, k, i0, i1, i2, i3, i4
	.ifc		\enc, e
	do_enc_Nx	e, mc, \k, \i0, \i1, \i2, \i3, \i4
	.else
	do_enc_Nx	d, imc, \k, \i0, \i1, \i2, \i3, \i4
	.endif
	.endm

	/* up to 5 interleaved final rounds */
	.macro		fin_round_Nx, de, k, k2, i0, i1, i2, i3, i4
	aes\de		\i0\().16b, \k\().16b
	.ifnb		\i1
	aes\de		\i1\().16b, \k\().16b
	.ifnb		\i3
	aes\de		\i2\().16b, \k\().16b
	aes\de		\i3\().16b, \k\().16b
	.ifnb		\i4
	aes\de		\i4\().16b, \k\().16b
	.endif
	.endif
	.endif
	eor		\i0\().16b, \i0\().16b, \k2\().16b
	.ifnb		\i1
	eor		\i1\().16b, \i1\().16b, \k2\().16b
	.ifnb		\i3
	eor		\i2\().16b, \i2\().16b, \k2\().16b
	eor		\i3\().16b, \i3\().16b, \k2\().16b
	.ifnb		\i4
	eor		\i4\().16b, \i4\().16b, \k2\().16b
	.endif
	.endif
	.endif
	.endm

	/* up to 5 interleaved blocks */
	.macro		do_block_Nx, enc, rounds, i0, i1, i2, i3, i4
	tbz		\rounds, #2, .L\@	/* 128 bits */
	round_Nx	\enc, v17, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v18, \i0, \i1, \i2, \i3, \i4
	tbz		\rounds, #1, .L\@	/* 192 bits */
	round_Nx	\enc, v19, \i0, \i1, \i2, \i3, \i4
	round_Nx	\enc, v20, \i0, \i1, \i2, \i3, \i4
.L\@:	.irp		key, v21, v22, v23, v24, v25, v26, v27, v28, v29
	round_Nx	\enc, \key, \i0, \i1, \i2, \i3, \i4
	.endr
	fin_round_Nx	\enc, v30, v31, \i0, \i1, \i2, \i3, \i4
	.endm

	.macro		encrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \in
	.endm

	.macro		encrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3
	.endm

	.macro		encrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	e, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm
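
	/*
	 * Worked example, derived from the macros above (not part of the
	 * original comments): load_round_keys sets \tmp = \rk + \nr * 16 -
	 * 160, so the last eleven round keys always land in v21-v31.  For
	 * AES-256 (\rounds == 14) bits 2 and 1 of \rounds are set and
	 * do_block_Nx consumes all of v17-v31: 4 + 9 full rounds plus the
	 * final aese/eor pair on v30/v31.  For AES-192 (\rounds == 12)
	 * bit 1 is clear, so v19/v20 are skipped (2 + 9 + 1 rounds).  For
	 * AES-128 (\rounds == 10) bit 2 is clear as well, so v17-v20 are
	 * skipped entirely and only v21-v31 are used (9 + 1 rounds).
	 */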
	.macro		decrypt_block, in, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \in
	.endm

	.macro		decrypt_block4x, i0, i1, i2, i3, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3
	.endm

	.macro		decrypt_block5x, i0, i1, i2, i3, i4, rounds, t0, t1, t2
	do_block_Nx	d, \rounds, \i0, \i1, \i2, \i3, \i4
	.endm

#define MAX_STRIDE	5

#include "aes-modes.S"
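
/*
 * aes-modes.S instantiates the block cipher mode routines on top of the
 * macros above, and AES_FUNC_START gives each generated symbol a ce_
 * prefix so the C glue code can bind to this Crypto Extensions build.
 * As a rough sketch of the resulting interface (the authoritative
 * prototypes live in the accompanying glue code, e.g. aes-glue.c; the
 * exact parameter types there may differ):
 *
 *	asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[],
 *					   u32 const rk[], int rounds,
 *					   int blocks);
 */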