24 #include "../fipsmodule/cipher/internal.h"
27 #define EVP_AEAD_AES_GCM_SIV_NONCE_LEN 12
28 #define EVP_AEAD_AES_GCM_SIV_TAG_LEN 16
32 #if defined(OPENSSL_X86_64) && !defined(OPENSSL_NO_ASM) && \
33 !defined(OPENSSL_WINDOWS)
34 #define AES_GCM_SIV_ASM
// Per-key context for the x86-64 assembly AES-GCM-SIV implementation.
// NOTE(review): this SOURCE is a sampled fragment (interior lines are
// missing), so the struct members are not visible here.
38 struct aead_aes_gcm_siv_asm_ctx {
// Compile-time guarantee that the opaque AEAD state area is large enough
// to hold this context.
46 sizeof(
struct aead_aes_gcm_siv_asm_ctx),
47 "AEAD state is too small");
// On GCC/Clang, additionally check the state's alignment statically; the
// assembly routines require 16-byte alignment (see the runtime assert in
// the init function below).
48 #if defined(__GNUC__) || defined(__clang__)
50 "AEAD state has insufficient alignment");
// Returns the aead_aes_gcm_siv_asm_ctx stored inside |ctx|'s opaque state
// area. The computation of |offset| is not visible in this fragment —
// presumably it aligns the pointer to 16 bytes; confirm against the full
// file.
54 static struct aead_aes_gcm_siv_asm_ctx *asm_ctx_from_ctx(
59 return (
struct aead_aes_gcm_siv_asm_ctx *)(&
ctx->state.opaque[
offset]);
// Initializer for the assembly AEAD: validates the key size, records
// whether the key is 128-bit, and stores the tag length. (Several
// statements, including the error path body, are not visible in this
// fragment.)
73 size_t key_len,
size_t tag_len) {
74 const size_t key_bits = key_len * 8;
// Only AES-128 and AES-256 keys are accepted.
76 if (key_bits != 128 && key_bits != 256) {
90 struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = asm_ctx_from_ctx(
ctx);
// The assembly code assumes a 16-byte-aligned context.
91 assert((((
uintptr_t)gcm_siv_ctx) & 15) == 0);
93 if (key_bits == 128) {
95 gcm_siv_ctx->is_128_bit = 1;
98 gcm_siv_ctx->is_128_bit = 0;
101 ctx->tag_len = tag_len;
// Declarations for the assembly helper routines (bodies live in a .S
// file elsewhere). The 16*8-byte in/out buffer carries the running tag
// plus scratch space; the 16*15-byte out_expanded_key holds an expanded
// AES key schedule — presumably one 16-byte round key per round up to
// AES-256's 15 round keys; confirm against the assembly source.
142 uint8_t in_out_calculated_tag_and_scratch[16 * 8],
144 const struct aead_aes_gcm_siv_asm_ctx *
key,
149 uint8_t in_out_calculated_tag_and_scratch[16 * 8],
151 const struct aead_aes_gcm_siv_asm_ctx *
key,
// Key-expansion outputs (128- and 256-bit variants).
171 uint8_t out_expanded_key[16 * 15],
177 uint8_t out_expanded_key[16 * 15],
184 const struct aead_aes_gcm_siv_asm_ctx *expanded_key);
190 const struct aead_aes_gcm_siv_asm_ctx *expanded_key);
// Bulk encrypt/decrypt entry points taking the expanded key context.
199 const struct aead_aes_gcm_siv_asm_ctx *
key,
206 const struct aead_aes_gcm_siv_asm_ctx *
key,
213 const struct aead_aes_gcm_siv_asm_ctx *
key,
220 const struct aead_aes_gcm_siv_asm_ctx *
key,
// POLYVAL tag computation over |ad| then |in| for the assembly path.
// (Function header and several statements are not visible in this
// fragment.)
226 size_t in_len,
const uint8_t *ad,
size_t ad_len,
// Whole 16-byte block counts for AD and input.
230 const size_t ad_blocks = ad_len / 16;
231 const size_t in_blocks = in_len / 16;
// H-table scratch for the assembly routines; 16-byte aligned as they
// require.
233 alignas(16)
uint8_t htable[16*8];
// Inputs larger than 8 blocks (AD or plaintext) take a different path —
// the branch bodies are not visible in this fragment.
235 if (ad_blocks > 8 || in_blocks > 8) {
// The final POLYVAL block encodes the AD and input lengths in bits.
273 length_block.bitlens.ad = ad_len * 8;
274 length_block.bitlens.in = in_len * 8;
// XOR the 12-byte nonce into the leading bytes of the computed tag.
277 for (
size_t i = 0;
i < 12;
i++) {
278 out_tag[
i] ^= nonce[
i];
// Encrypts/decrypts the final partial (< 16-byte) chunk of input by
// XORing it with the keystream bytes held in counter.c. The setup of
// |counter| is not visible in this fragment.
288 static void aead_aes_gcm_siv_asm_crypt_last_block(
291 const struct aead_aes_gcm_siv_asm_ctx *enc_key_expanded) {
// Offset of the trailing partial block (in_len rounded down to 16) and
// its length (in_len mod 16).
306 const size_t last_bytes_offset = in_len & ~15;
307 const size_t last_bytes_len = in_len & 15;
308 uint8_t *last_bytes_out = &
out[last_bytes_offset];
309 const uint8_t *last_bytes_in = &
in[last_bytes_offset];
// XOR the trailing bytes with the keystream block, byte by byte.
310 for (
size_t i = 0;
i < last_bytes_len;
i++) {
311 last_bytes_out[
i] = last_bytes_in[
i] ^
counter.c[
i];
// Derives the per-nonce record-authentication and record-encryption keys
// from the main key and the nonce. (The key-material generation between
// the buffers and the copies below is not visible in this fragment.)
317 static void aead_aes_gcm_siv_kdf(
318 int is_128_bit,
const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx,
// 16-byte-aligned nonce buffer for the assembly routines.
321 alignas(16)
uint8_t padded_nonce[16];
324 alignas(16)
uint64_t key_material[12];
// Only every other 64-bit word of |key_material| is kept — presumably
// the KDF uses just the first half of each 16-byte AES output block;
// confirm against the assembly.
327 out_record_enc_key[0] = key_material[4];
328 out_record_enc_key[1] = key_material[6];
// 256-bit branch: four words of encryption key instead of two.
331 out_record_enc_key[0] = key_material[4];
332 out_record_enc_key[1] = key_material[6];
333 out_record_enc_key[2] = key_material[8];
334 out_record_enc_key[3] = key_material[10];
// The authentication key comes from the first key-material blocks.
337 out_record_auth_key[0] = key_material[0];
338 out_record_auth_key[1] = key_material[2];
// Seal (encrypt + authenticate) for the assembly implementation, scatter
// variant: the tag is written separately from the ciphertext. Many
// statements are missing from this fragment.
341 static int aead_aes_gcm_siv_asm_seal_scatter(
343 size_t *out_tag_len,
size_t max_out_tag_len,
const uint8_t *nonce,
344 size_t nonce_len,
const uint8_t *
in,
size_t in_len,
const uint8_t *extra_in,
345 size_t extra_in_len,
const uint8_t *ad,
size_t ad_len) {
346 const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = asm_ctx_from_ctx(
ctx);
// Plaintext length is capped at 2^36 bytes; the second condition of this
// check is not visible here — TODO confirm against the full file.
350 if (in_len_64 > (
UINT64_C(1) << 36) ||
// Derive the per-nonce record keys from the nonce.
366 alignas(16)
uint64_t record_auth_key[2];
367 alignas(16)
uint64_t record_enc_key[4];
368 aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
369 record_enc_key, nonce);
// Compute the POLYVAL tag over AD and plaintext with the derived
// authentication key.
372 gcm_siv_asm_polyval(
tag,
in, in_len, ad, ad_len,
373 (
const uint8_t *)record_auth_key, nonce);
// Expand the record-encryption key for the assembly cipher routines.
375 struct aead_aes_gcm_siv_asm_ctx enc_key_expanded;
377 if (gcm_siv_ctx->is_128_bit) {
// Encrypt any trailing partial block using the tag-derived counter.
398 aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit,
out,
in,
399 in_len,
tag, &enc_key_expanded);
// Open (decrypt + verify) for the assembly implementation. Many
// statements are missing from this fragment.
412 size_t *out_len,
size_t max_out_len,
413 const uint8_t *nonce,
size_t nonce_len,
415 const uint8_t *ad,
size_t ad_len) {
// AD length is capped below 2^61 bytes.
417 if (ad_len_64 >= (
UINT64_C(1) << 61)) {
434 const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = asm_ctx_from_ctx(
ctx);
// The transmitted tag trails the ciphertext in |in|.
436 const uint8_t *
const given_tag =
in + plaintext_len;
438 if (max_out_len < plaintext_len) {
// Derive per-nonce record keys, mirroring the seal path.
443 alignas(16)
uint64_t record_auth_key[2];
444 alignas(16)
uint64_t record_enc_key[4];
445 aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
446 record_enc_key, nonce);
448 struct aead_aes_gcm_siv_asm_ctx expanded_key;
449 if (gcm_siv_ctx->is_128_bit) {
// Running tag plus POLYVAL scratch, zero-initialized.
456 alignas(16)
uint8_t calculated_tag[16 * 8] = {0};
459 const size_t ad_blocks = ad_len / 16;
// H-table scratch for the assembly routines.
471 alignas(16)
uint8_t htable[16 * 6];
474 if (gcm_siv_ctx->is_128_bit) {
// Decrypt any trailing partial block, seeding the counter from the
// transmitted tag.
482 if (plaintext_len & 15) {
483 aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit,
out,
in,
484 plaintext_len, given_tag,
// Fold the AD/plaintext bit lengths into the tag computation.
500 length_block.bitlens.ad = ad_len * 8;
501 length_block.bitlens.in = plaintext_len * 8;
// XOR in the 12-byte nonce, as on the tag-generation path.
505 for (
size_t i = 0;
i < 12;
i++) {
506 calculated_tag[
i] ^= nonce[
i];
// Clear the top bit of the final tag byte, matching how the tag is
// constructed during sealing.
509 calculated_tag[15] &= 0x7f;
511 if (gcm_siv_ctx->is_128_bit) {
// AEAD vtable for AES-128-GCM-SIV (assembly implementation). Fields not
// shown here (key/nonce/tag sizes, seal callback, etc.) are missing from
// this sampled fragment.
527 static const EVP_AEAD aead_aes_128_gcm_siv_asm = {
534 aead_aes_gcm_siv_asm_init,
536 aead_aes_gcm_siv_asm_cleanup,
537 aead_aes_gcm_siv_asm_open,
538 aead_aes_gcm_siv_asm_seal_scatter,
// AES-256 variant: shares every visible callback with the 128-bit table;
// the init function distinguishes key sizes at runtime.
544 static const EVP_AEAD aead_aes_256_gcm_siv_asm = {
551 aead_aes_gcm_siv_asm_init,
553 aead_aes_gcm_siv_asm_cleanup,
554 aead_aes_gcm_siv_asm_open,
555 aead_aes_gcm_siv_asm_seal_scatter,
561 #endif // X86_64 && !NO_ASM && !WINDOWS
// Portable (non-assembly) implementation follows. These static checks
// mirror the assembly path: the opaque AEAD state must be large enough
// and sufficiently aligned for the portable context.
574 "AEAD state is too small");
575 #if defined(__GNUC__) || defined(__clang__)
578 "AEAD state has insufficient alignment");
// Init for the portable implementation: validates the key size, records
// whether the key is 256-bit, and stores the tag length.
582 size_t key_len,
size_t tag_len) {
583 const size_t key_bits = key_len * 8;
// Only AES-128 and AES-256 keys are accepted.
585 if (key_bits != 128 && key_bits != 256) {
604 gcm_siv_ctx->
is_256 = (key_len == 32);
605 ctx->tag_len = tag_len;
// Portable CTR-style bulk crypt loop: processes |in_len| bytes in chunks
// (the per-iteration chunk size |todo| and the keystream generation are
// not visible in this fragment — |done| presumably advances by |todo|;
// confirm against the full file).
630 for (
size_t done = 0;
done < in_len;) {
// XOR |todo| keystream bytes into the output, one byte at a time.
640 for (
size_t i = 0;
i <
todo;
i++) {
// Portable POLYVAL: hashes AD and input under |auth_key|, appends the
// bit-length block, then XORs the nonce into the result. Most statements
// are missing from this fragment.
652 size_t ad_len,
const uint8_t auth_key[16],
// Final block encodes AD and input lengths in bits.
681 length_block.bitlens.ad = ad_len * 8;
682 length_block.bitlens.in = in_len * 8;
684 sizeof(length_block));
// XOR the nonce into the leading tag bytes (loop header not visible).
688 out_tag[
i] ^= nonce[
i];
// Portable per-nonce key derivation: generates 4 KDF blocks for AES-128
// or 6 for AES-256, covering both the authentication key and the
// encryption key material.
710 uint8_t key_material[(128 + 256 ) / 8];
711 const size_t blocks_needed = gcm_siv_ctx->
is_256 ? 6 : 4;
// Produce each key-material block (loop body not visible in this
// fragment).
717 for (
size_t i = 0;
i < blocks_needed;
i++) {
// The encryption key starts 16 bytes into the material; its length
// (32 vs 16 bytes) depends on the key size.
735 key_material + 16, gcm_siv_ctx->
is_256 ? 32 : 16);
// Portable seal (scatter variant): signature matches the assembly
// seal_scatter above. Only the parameter list is visible in this
// fragment; the body is missing.
740 size_t *out_tag_len,
size_t max_out_tag_len,
const uint8_t *nonce,
741 size_t nonce_len,
const uint8_t *
in,
size_t in_len,
const uint8_t *extra_in,
742 size_t extra_in_len,
const uint8_t *ad,
size_t ad_len) {
// Portable open: recomputes the expected tag and verifies it in constant
// time before accepting the plaintext. Most statements are missing from
// this fragment.
781 const uint8_t *nonce,
size_t nonce_len,
784 size_t in_tag_len,
const uint8_t *ad,
// AD length capped below 2^61 bytes, as on the assembly path.
787 if (ad_len_64 >= (
UINT64_C(1) << 61)) {
// Encrypt the POLYVAL result (in place) to form the expected tag.
814 keys.enc_block(expected_tag, expected_tag, &
keys.enc_key.ks);
// Constant-time tag comparison; mismatch rejects the ciphertext.
816 if (
CRYPTO_memcmp(expected_tag, in_tag,
sizeof(expected_tag)) != 0) {
858 #if defined(AES_GCM_SIV_ASM)
// Returns nonzero when the CPU has both features the assembly needs:
// ECX bit 25 (written as 57 - 32; AES-NI per the function name) and ECX
// bit 28 (AVX per the function name). The source of |ecx| is not visible
// in this fragment — presumably OPENSSL's cached CPUID state; confirm.
860 static char avx_aesni_capable(
void) {
863 return (ecx & (1 << (57 - 32))) != 0 &&
864 (ecx & (1 << 28)) != 0 ;
// Prefer the assembly AEAD tables when the CPU supports them; the
// fallback (portable) return paths are not visible in this fragment.
868 if (avx_aesni_capable()) {
869 return &aead_aes_128_gcm_siv_asm;
875 if (avx_aesni_capable()) {
876 return &aead_aes_256_gcm_siv_asm;
891 #endif // AES_GCM_SIV_ASM