use self::ffi::{Block, BLOCK_LEN, ZERO_BLOCK};
use super::{aes_gcm, Aad};
use crate::{
    bits::{BitLength, FromByteLen as _},
    error::{self, InputTooLongError},
    polyfill::{slice::AsChunks, sliceutil::overwrite_at_start, NotSend},
};
use cfg_if::cfg_if;

pub(super) use ffi::KeyValue;
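// `HTable` and `Xi` are re-exported from this module only on the targets that
// also get the `inner()` accessors below; on other targets they stay private.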
cfg_if! {
    if #[cfg(any(all(target_arch = "aarch64", target_endian = "little"), target_arch = "x86_64"))] {
        pub(super) use self::ffi::{HTable, Xi};
    } else {
        use self::ffi::{HTable, Xi};
    }
}

#[macro_use]
mod ffi;
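// One GHASH backend per platform capability; each provides a `Key` type that
// is used through the `UpdateBlock`/`UpdateBlocks` traits defined below.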
pub(super) mod clmul;
pub(super) mod clmulavxmovbe;
pub(super) mod fallback;
pub(super) mod neon;
pub(super) mod vclmulavx2;
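/// GHASH state for a single AES-GCM operation: the `Xi` accumulator, a
/// reference to the hash key, and the AAD/input lengths that are folded in by
/// `pre_finish`. The `NotSend` marker keeps a `Context` from crossing threads.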
pub(super) struct Context<'key, K> {
    Xi: Xi,
    key: &'key K,
    aad_len: BitLength<u64>,
    in_out_len: BitLength<u64>,
    _not_send: NotSend,
}

impl<'key, K: UpdateBlock> Context<'key, K> {
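    /// Checks the input length limit, records the AAD and input lengths (in
    /// bits), and absorbs the AAD into `Xi`.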
    #[inline(always)]
    pub(crate) fn new(
        key: &'key K,
        aad: Aad<&[u8]>,
        in_out_len: usize,
    ) -> Result<Self, error::Unspecified> {
        if in_out_len > aes_gcm::MAX_IN_OUT_LEN {
            return Err(error::Unspecified);
        }
        let in_out_len =
            BitLength::from_byte_len(in_out_len).map_err(error::erase::<InputTooLongError>)?;
        let aad_len = BitLength::from_byte_len(aad.as_ref().len())
            .map_err(error::erase::<InputTooLongError>)?;

        let mut ctx = Self {
            Xi: Xi(ZERO_BLOCK),
            key,
            aad_len,
            in_out_len,
            _not_send: NotSend::VALUE,
        };
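        // GHASH the AAD now; a final partial block (if any) is zero-padded to
        // a full block, as GCM requires.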
        for ad in aad.0.chunks(BLOCK_LEN) {
            let mut block = ZERO_BLOCK;
            overwrite_at_start(&mut block, ad);
            ctx.update_block(block);
        }

        Ok(ctx)
    }
}

#[cfg(all(
    target_arch = "aarch64",
    target_endian = "little",
    target_pointer_width = "64"
))]
impl<K> Context<'_, K> {
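    /// The input length rounded down to a whole number of 128-bit blocks,
    /// expressed in bits (the low 7 bits of the bit length are masked off).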
    pub(super) fn in_out_whole_block_bits(&self) -> BitLength<usize> {
        use crate::polyfill::usize_from_u64;
        const WHOLE_BLOCK_BITS_MASK: usize = !0b111_1111;
        #[allow(clippy::assertions_on_constants)]
        const _WHOLE_BLOCK_BITS_MASK_CORRECT: () =
            assert!(WHOLE_BLOCK_BITS_MASK == !((BLOCK_LEN * 8) - 1));
        BitLength::from_bits(usize_from_u64(self.in_out_len.as_bits()) & WHOLE_BLOCK_BITS_MASK)
    }
}
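// The `inner()` accessors below hand the raw `HTable` and `Xi` to callers
// (presumably the platform-specific integrated AES-GCM paths) that drive
// GHASH directly rather than through `update_block`/`update_blocks`.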
#[cfg(all(target_arch = "aarch64", target_endian = "little"))]
impl Context<'_, clmul::Key> {
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (self.key.inner(), &mut self.Xi)
    }
}
#[cfg(target_arch = "x86_64")]
impl Context<'_, clmulavxmovbe::Key> {
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (self.key.inner(), &mut self.Xi)
    }
}

#[cfg(target_arch = "x86_64")]
impl Context<'_, vclmulavx2::Key> {
    #[inline]
    pub(super) fn inner(&mut self) -> (&HTable, &mut Xi) {
        (self.key.inner(), &mut self.Xi)
    }
}
impl<K: UpdateBlocks> Context<'_, K> {
    #[inline(always)]
    pub fn update_blocks(&mut self, input: AsChunks<u8, BLOCK_LEN>) {
        self.key.update_blocks(&mut self.Xi, input);
    }
}

impl<K: UpdateBlock> Context<'_, K> {
    pub fn update_block(&mut self, a: Block) {
        self.key.update_block(&mut self.Xi, a);
    }
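    /// Finishes GHASH: folds in the final block containing the big-endian bit
    /// lengths of the AAD and the input, then passes the accumulated `Xi`
    /// value to `f`, which turns it into the authentication tag.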
    #[inline(always)]
    pub(super) fn pre_finish<F>(mut self, f: F) -> super::Tag
    where
        F: FnOnce(Block) -> super::Tag,
    {
        let mut block = [0u8; BLOCK_LEN];
        let (alen, clen) = block.split_at_mut(BLOCK_LEN / 2);
        alen.copy_from_slice(&BitLength::<u64>::to_be_bytes(self.aad_len));
        clen.copy_from_slice(&BitLength::<u64>::to_be_bytes(self.in_out_len));
        self.update_block(block);
        f(self.Xi.0)
    }
}
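/// Folds a single block into the GHASH accumulator; implemented by each
/// backend's `Key` type.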
pub(super) trait UpdateBlock {
    fn update_block(&self, xi: &mut Xi, a: Block);
}
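/// Folds a run of whole blocks into the GHASH accumulator; the bulk
/// counterpart of `UpdateBlock`.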
pub(super) trait UpdateBlocks {
    fn update_blocks(&self, xi: &mut Xi, input: AsChunks<u8, BLOCK_LEN>);
}