use universal_hash::{
    consts::{U16, U4},
    crypto_common::{BlockSizeUser, ParBlocksSizeUser},
    generic_array::GenericArray,
    UhfBackend,
};

use crate::{Block, Key, Tag};

mod helpers;
use self::helpers::*;

type ParBlocks = universal_hash::ParBlocks<State>;

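/// Accumulator state that only exists once the first full set of four blocks
/// has been absorbed.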
#[derive(Copy, Clone)]
struct Initialized {
    /// Running polynomial accumulator, held as four 130-bit lanes.
    p: Aligned4x130,
    /// Spaced multipliers used to collapse the four lanes at finalization.
    m: SpacedMultiplier4x130,
    /// Precomputed R^4, used to advance the accumulator by four blocks at a time.
    r4: PrecomputedMultiplier,
}

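/// AVX2-accelerated Poly1305 hashing state.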
#[derive(Clone)]
pub(crate) struct State {
    /// Addition key (the `s` half of the Poly1305 key), added when producing the tag.
    k: AdditionKey,
    /// Polynomial evaluation key R.
    r1: PrecomputedMultiplier,
    /// Precomputed R^2.
    r2: PrecomputedMultiplier,
    /// Wide accumulator; `None` until the first four blocks have been processed.
    initialized: Option<Initialized>,
    /// Full blocks buffered until four are available for parallel processing.
    cached_blocks: [Block; 4],
    /// Number of blocks currently held in `cached_blocks`.
    num_cached_blocks: usize,
    /// Final partial block (fewer than 16 bytes of message), stored until finalization.
    partial_block: Option<Block>,
}

impl State {
    /// Initializes the hash state from the given Poly1305 key.
    pub(crate) fn new(key: &Key) -> Self {
        // Derive the addition key and the polynomial key R from the key bytes.
        let (k, r1) = unsafe { prepare_keys(key) };

        // Precompute R^2; used to build the spaced multipliers and for the
        // two-block path in `finalize`.
        let r2 = (r1 * r1).reduce();

        State {
            k,
            r1,
            r2: r2.into(),
            initialized: None,
            cached_blocks: [Block::default(); 4],
            num_cached_blocks: 0,
            partial_block: None,
        }
    }

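    /// Processes four message blocks in parallel.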
    #[target_feature(enable = "avx2")]
    pub(crate) unsafe fn compute_par_blocks(&mut self, blocks: &ParBlocks) {
        // The parallel path is only valid when nothing is buffered.
        assert!(self.partial_block.is_none());
        assert_eq!(self.num_cached_blocks, 0);

        self.process_blocks(Aligned4x130::from_par_blocks(blocks));
    }

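    /// Buffers a single block, processing a batch of four once the cache is full.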
    #[target_feature(enable = "avx2")]
    pub(crate) unsafe fn compute_block(&mut self, block: &Block, partial: bool) {
        // A partial (final, short) block is stashed until finalization.
        if partial {
            assert!(self.partial_block.is_none());
            self.partial_block = Some(*block);
            return;
        }

        // Buffer full blocks until four are available, then process them together.
        self.cached_blocks[self.num_cached_blocks].copy_from_slice(block);
        if self.num_cached_blocks < 3 {
            self.num_cached_blocks += 1;
            return;
        } else {
            self.num_cached_blocks = 0;
        }

        self.process_blocks(Aligned4x130::from_blocks(&self.cached_blocks));
    }

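    /// Folds four aligned blocks into the running accumulator.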
    #[target_feature(enable = "avx2")]
    unsafe fn process_blocks(&mut self, blocks: Aligned4x130) {
        if let Some(inner) = &mut self.initialized {
            // Advance the accumulator by four blocks: multiply by R^4, then add.
            inner.p = (&inner.p * inner.r4).reduce() + blocks;
        } else {
            // First set of blocks: seed the accumulator directly.
            let p = blocks;

            // Derive the spaced multipliers and R^4 from R and R^2.
            let (m, r4) = SpacedMultiplier4x130::new(self.r1, self.r2);

            self.initialized = Some(Initialized { p, m, r4 });
        }
    }

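    /// Consumes any buffered blocks and produces the 16-byte authentication tag.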
    #[target_feature(enable = "avx2")]
    pub(crate) unsafe fn finalize(&mut self) -> Tag {
        assert!(self.num_cached_blocks < 4);
        let mut data = &self.cached_blocks[..];

        // Collapse the four accumulator lanes into a single value, if any
        // full sets of four blocks were processed.
        let mut p = self
            .initialized
            .take()
            .map(|inner| (inner.p * inner.m).sum().reduce());

        // Fold in up to two remaining cached blocks using R and R^2.
        if self.num_cached_blocks >= 2 {
            let mut c = Aligned2x130::from_blocks(data[..2].try_into().unwrap());
            if let Some(p) = p {
                c = c + p;
            }
            p = Some(c.mul_and_sum(self.r1, self.r2).reduce());
            data = &data[2..];
            self.num_cached_blocks -= 2;
        }

        // Fold in a single remaining cached block.
        if self.num_cached_blocks == 1 {
            let mut c = Aligned130::from_block(&data[0]);
            if let Some(p) = p {
                c = c + p;
            }
            p = Some((c * self.r1).reduce());
            self.num_cached_blocks -= 1;
        }

        // Fold in the final partial block, if there is one.
        if let Some(block) = &self.partial_block {
            let mut c = Aligned130::from_partial_block(block);
            if let Some(p) = p {
                c = c + p;
            }
            p = Some((c * self.r1).reduce());
        }

        // Add the addition key and serialize the result as the tag.
        let mut tag = GenericArray::<u8, _>::default();
        let tag_int = if let Some(p) = p {
            self.k + p
        } else {
            self.k.into()
        };
        tag_int.write(tag.as_mut_slice());

        tag
    }
}

impl BlockSizeUser for State {
    type BlockSize = U16;
}

impl ParBlocksSizeUser for State {
    type ParBlocksSize = U4;
}

impl UhfBackend for State {
    fn proc_block(&mut self, block: &Block) {
        unsafe { self.compute_block(block, false) };
    }

    fn proc_par_blocks(&mut self, blocks: &ParBlocks) {
        // Only take the parallel path when no blocks are buffered; otherwise
        // fall back to processing the blocks one at a time.
        if self.num_cached_blocks == 0 {
            unsafe { self.compute_par_blocks(blocks) };
        } else {
            for block in blocks {
                self.proc_block(block);
            }
        }
    }

    fn blocks_needed_to_align(&self) -> usize {
        // Number of single blocks the caller should feed before the backend
        // is aligned for parallel processing again.
        if self.num_cached_blocks == 0 {
            0
        } else {
            self.cached_blocks.len() - self.num_cached_blocks
        }
    }
}