/*
 * Copyright 2019-2025 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * Crypto extension support for AES GCM.
 * This file is included by cipher_aes_gcm_hw.c
 */

/*
 * Encrypt as many whole 16-byte blocks of |in| as possible with the ARMv8
 * assembly kernels and return the number of bytes processed; the caller
 * handles any remaining partial block. The kernels take the input length
 * in bits, hence align_bytes * 8.
 */
size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const void *key,
                             unsigned char ivec[16], u64 *Xi)
{
    AES_KEY *aes_key = (AES_KEY *)key;
    size_t align_bytes = len - len % 16;

    /* 10/12/14 rounds select the AES-128/192/256 kernels respectively. */
    switch (aes_key->rounds) {
    case 10:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_enc_128_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_enc_128_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    case 12:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_enc_192_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_enc_192_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    case 14:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_enc_256_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_enc_256_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    }
    return align_bytes;
}

/*
 * Decryption counterpart of armv8_aes_gcm_encrypt(), with the same
 * contract: only whole 16-byte blocks are processed here.
 */
size_t armv8_aes_gcm_decrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const void *key,
                             unsigned char ivec[16], u64 *Xi)
{
    AES_KEY *aes_key = (AES_KEY *)key;
    size_t align_bytes = len - len % 16;

    switch (aes_key->rounds) {
    case 10:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_dec_128_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_dec_128_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    case 12:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_dec_192_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_dec_192_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    case 14:
        if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
            unroll8_eor3_aes_gcm_dec_256_kernel(in, align_bytes * 8, out,
                                                (uint64_t *)Xi, ivec, key);
        } else {
            aes_gcm_dec_256_kernel(in, align_bytes * 8, out,
                                   (uint64_t *)Xi, ivec, key);
        }
        break;
    }
    return align_bytes;
}

/*
 * Expand the key with the ARMv8 Crypto Extension routines and pick a CTR
 * kernel: the EOR3 unroll-by-12 variant where the CPU supports it, the
 * plain kernel otherwise.
 */
static int armv8_aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
                                 size_t keylen)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

    if (AES_UNROLL12_EOR3_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
                              aes_v8_ctr32_encrypt_blocks_unroll12_eor3);
    } else {
        GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
                              aes_v8_ctr32_encrypt_blocks);
    }
    return 1;
}

/* Method table wiring the ARMv8 routines into the generic GCM provider. */
static const PROV_GCM_HW armv8_aes_gcm = {
    armv8_aes_gcm_initkey,
    ossl_gcm_setiv,
    ossl_gcm_aad_update,
    generic_aes_gcm_cipher_update,
    ossl_gcm_cipher_final,
    ossl_gcm_one_shot
};

/*
 * Use the ARMv8 implementation only when PMULL is available for GHASH;
 * otherwise fall back to the generic aes_gcm methods.
 */
const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
{
    return AES_PMULL_CAPABLE ? &armv8_aes_gcm : &aes_gcm;
}
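
/*
 * Illustrative sketch only, not part of the upstream file: one way a caller
 * could consume the bulk/tail contract of armv8_aes_gcm_encrypt() above.
 * The kernel processes only whole 16-byte blocks and returns the byte count
 * it handled, so the remainder (at most 15 bytes) is finished on the generic
 * byte-wise path. example_cipher_update() is a hypothetical name; the field
 * accesses assume libcrypto's GCM128_CONTEXT layout (gcm.key, gcm.Yi.c,
 * gcm.Xi.u, gcm.len.u), and the sketch is simplified in that it ignores any
 * buffered partial-block state the real update path would have to drain
 * before calling the bulk kernel.
 */
#if 0
static int example_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in,
                                 unsigned char *out, size_t len)
{
    /* Bulk: the ARMv8 kernel consumes the 16-byte-aligned prefix, updating
     * the counter block (Yi) and GHASH state (Xi) as it goes. */
    size_t bulk = armv8_aes_gcm_encrypt(in, out, len, ctx->gcm.key,
                                        ctx->gcm.Yi.c, ctx->gcm.Xi.u);

    /* The kernel bypasses the length bookkeeping, so account for the
     * processed bytes in the running ciphertext length. */
    ctx->gcm.len.u[1] += bulk;

    /* Tail: finish the leftover bytes on the generic path
     * (CRYPTO_gcm128_encrypt() returns 0 on success). */
    if (CRYPTO_gcm128_encrypt(&ctx->gcm, in + bulk, out + bulk, len - bulk))
        return 0;
    return 1;
}
#endif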