xref: /freebsd/sys/crypto/armv8/armv8_crypto.h (revision d4eeb02986980bf33dd56c41ceb9fc5f180c0d47)
1 /*-
2  * Copyright (c) 2016 The FreeBSD Foundation
3  *
4  * This software was developed by Andrew Turner under
5  * sponsorship from the FreeBSD Foundation.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice, this list of conditions and the following disclaimer.
12  * 2. Redistributions in binary form must reproduce the above copyright
13  *    notice, this list of conditions and the following disclaimer in the
14  *    documentation and/or other materials provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
17  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
20  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26  * SUCH DAMAGE.
27  *
28  * $FreeBSD$
29  */
30 
31 #ifndef _ARMV8_CRYPTO_H_
32 #define _ARMV8_CRYPTO_H_
33 
/*
 * AES-256 uses 14 rounds; the expanded key schedule holds one round key
 * per round plus the initial AddRoundKey block, hence (rounds + 1) blocks.
 * AES_BLOCK_LEN is defined elsewhere (opencrypto) -- presumably 16 bytes;
 * not visible in this header.
 */
34 #define	AES256_ROUNDS	14
35 #define	AES_SCHED_LEN	((AES256_ROUNDS + 1) * AES_BLOCK_LEN)
36 
/*
 * Expanded AES key schedule in the layout consumed by the aesv8-armx.S
 * assembly routines declared below.
 */
37 typedef struct {
	/* Round keys as 32-bit words; AES_SCHED_LEN bytes total. */
38 	uint32_t		aes_key[AES_SCHED_LEN/4];
	/*
	 * Number of rounds for the selected key size (14 for AES-256);
	 * presumably filled in by aes_v8_set_{encrypt,decrypt}_key() --
	 * confirm against aesv8-armx.S.
	 */
39 	int			aes_rounds;
40 } AES_key_t;
41 
/*
 * A 128-bit value addressable at several granularities: as two 64-bit
 * words, four 32-bit words, sixteen bytes, or native machine words.
 * Used for GHASH state and the precomputed hash-key table below.
 */
42 typedef union {
43 		uint64_t u[2];
44 		uint32_t d[4];
45 		uint8_t c[16];
46 		size_t t[16 / sizeof(size_t)];
47 } __uint128_val_t;
48 
/*
 * Per-session state for the armv8crypto(4) driver: pre-expanded key
 * schedules and, for GCM, the precomputed GHASH key table.
 */
49 struct armv8_crypto_session {
	/* Encryption-direction key schedule. */
50 	AES_key_t enc_schedule;
	/* Decryption-direction key schedule. */
51 	AES_key_t dec_schedule;
	/* Second (tweak) key schedule for XTS mode. */
52 	AES_key_t xts_schedule;
	/*
	 * Precomputed multiples of the GHASH hash key H; presumably
	 * populated via gcm_init_v8() -- confirm in the session-setup code.
	 */
53 	__uint128_val_t Htable[16];
54 };
55 
/*
 * Prototypes for aesv8-armx.S (ARMv8 Crypto Extensions assembly).
 * aes_v8_encrypt processes a single AES block from 'in' to 'out'.
 * The key-setup routines expand 'userKey' of 'bits' size into 'key';
 * their int return is presumably 0 on success -- confirm in the .S source.
 */
56 /* Prototypes for aesv8-armx.S */
57 void aes_v8_encrypt(uint8_t *in, uint8_t *out, const AES_key_t *key);
58 int aes_v8_set_encrypt_key(const unsigned char *userKey, const int bits, const AES_key_t *key);
59 int aes_v8_set_decrypt_key(const unsigned char *userKey, const int bits, const AES_key_t *key);
60 
/*
 * Prototypes for ghashv8-armx.S (GHASH via PMULL).
 * gcm_init_v8 precomputes the Htable from the hash key in Xi;
 * gcm_gmult_v8 multiplies the running digest Xi by H;
 * gcm_ghash_v8 folds 'len' bytes at 'inp' into Xi.
 */
61 /* Prototypes for ghashv8-armx.S */
62 void gcm_init_v8(__uint128_val_t Htable[16], const uint64_t Xi[2]);
63 void gcm_gmult_v8(uint64_t Xi[2], const __uint128_val_t Htable[16]);
64 void gcm_ghash_v8(uint64_t Xi[2], const __uint128_val_t Htable[16], const uint8_t *inp, size_t len);
65 
/*
 * Bulk AES-CBC over crypto_buffer_cursor streams: 'len' bytes are read
 * from 'fromc' and written to 'toc', chained from the given IV.
 */
66 void armv8_aes_encrypt_cbc(const AES_key_t *key, size_t len,
67     struct crypto_buffer_cursor *fromc, struct crypto_buffer_cursor *toc,
68     const uint8_t iv[static AES_BLOCK_LEN]);
69 void armv8_aes_decrypt_cbc(const AES_key_t *, size_t,
70     struct crypto_buffer_cursor *fromc, struct crypto_buffer_cursor *toc,
71     const uint8_t[static AES_BLOCK_LEN]);
/*
 * AES-GCM: (key, payload length, input cursor, output cursor,
 * AAD length, AAD pointer, tag buffer, IV, precomputed Htable).
 * Encrypt writes the computed tag; decrypt verifies the supplied tag
 * and its int return presumably reports a tag mismatch (e.g. EBADMSG)
 * -- confirm in armv8_crypto.c.
 */
72 void armv8_aes_encrypt_gcm(AES_key_t *, size_t,
73     struct crypto_buffer_cursor *, struct crypto_buffer_cursor *,
74     size_t, const uint8_t *,
75     uint8_t tag[static GMAC_DIGEST_LEN],
76     const uint8_t[static AES_BLOCK_LEN],
77     const __uint128_val_t *);
78 int armv8_aes_decrypt_gcm(AES_key_t *, size_t,
79     struct crypto_buffer_cursor *, struct crypto_buffer_cursor *,
80     size_t, const uint8_t *, const uint8_t tag[static GMAC_DIGEST_LEN],
81     const uint8_t[static AES_BLOCK_LEN],
82     const __uint128_val_t *);
83 
/*
 * AES-XTS: (data key schedule, tweak key schedule as opaque pointer,
 * length, input cursor, output cursor, initial tweak/IV).  The second
 * parameter is presumably the xts_schedule -- confirm against callers.
 */
84 void armv8_aes_encrypt_xts(AES_key_t *, const void *, size_t,
85     struct crypto_buffer_cursor *, struct crypto_buffer_cursor *,
86     const uint8_t[AES_BLOCK_LEN]);
87 void armv8_aes_decrypt_xts(AES_key_t *, const void *, size_t,
88     struct crypto_buffer_cursor *, struct crypto_buffer_cursor *,
89     const uint8_t[AES_BLOCK_LEN]);
90 
91 #endif /* _ARMV8_CRYPTO_H_ */
92