ring/aead/aes_gcm/vaesclmulavx2.rs

#![cfg(target_arch = "x86_64")]

use super::{aes, gcm, Counter, BLOCK_LEN};
use crate::{aead::aes::Overlapping, c, polyfill::slice::AsChunksMut};
use core::num::NonZeroU32;
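// This module routes the AES-GCM whole-block update paths to assembly
// routines built on the VAES (vector AES) and VPCLMULQDQ (vector carryless
// multiply) extensions over AVX2 registers, per the `vaes_avx2` and
// `vclmulavx2` names below. Each function encrypts or decrypts whole
// 16-byte blocks and folds them into the GHASH state (`Htable`, `Xi`).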
pub(super) fn seal_whole_vaes_clmul_avx2(
    aes_key: &aes::hw::Key,
    auth: &mut gcm::Context<gcm::vclmulavx2::Key>,
    ctr: &mut Counter,
    mut in_out: AsChunksMut<u8, BLOCK_LEN>,
) {
    prefixed_extern! {
        fn aes_gcm_enc_update_vaes_avx2(
            input: *const u8,
            output: *mut u8,
            len: c::size_t,
            key: &aes::AES_KEY,
            ivec: &Counter,
            Htable: &gcm::HTable,
            Xi: &mut gcm::Xi);
    }

    let in_out = in_out.as_flattened_mut();

    // `in_out` is a whole number of blocks, so the division is exact; the
    // conversion to `u32` fails only for inputs longer than `u32::MAX` blocks.
    let blocks = u32::try_from(in_out.len() / BLOCK_LEN).unwrap();

    if let Some(blocks) = NonZeroU32::new(blocks) {
        let aes_key = aes_key.inner_less_safe();
        let (htable, xi) = auth.inner();
        let input = in_out.as_ptr();
        let output = in_out.as_mut_ptr();
        let len = in_out.len();
        // SAFETY: `input` and `output` point at the same `len`-byte buffer,
        // so this encrypts in place.
        unsafe { aes_gcm_enc_update_vaes_avx2(input, output, len, aes_key, ctr, htable, xi) };
        // Advance the Rust-side counter past the blocks the assembly consumed.
        ctr.increment_by_less_safe(blocks);
    }
}
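// Illustration (not part of the original module): AES-GCM's CTR mode keeps a
// 32-bit big-endian block counter in the last four bytes of the 16-byte
// counter block, and one counter value is consumed per block; the
// `increment_by_less_safe(blocks)` calls keep the Rust-side `Counter` in
// step. A minimal standalone sketch of that `inc32`-style wrapping add
// (`ctr32_add` is a hypothetical name):
//
//     fn ctr32_add(counter_block: &mut [u8; 16], n: u32) {
//         // The first 12 bytes are the nonce portion; only the 32-bit
//         // big-endian tail is incremented, wrapping on overflow.
//         let tail: [u8; 4] = counter_block[12..].try_into().unwrap();
//         let ctr = u32::from_be_bytes(tail).wrapping_add(n);
//         counter_block[12..].copy_from_slice(&ctr.to_be_bytes());
//     }
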
pub(super) fn open_whole_vaes_clmul_avx2(
    aes_key: &aes::hw::Key,
    auth: &mut gcm::Context<gcm::vclmulavx2::Key>,
    in_out: Overlapping,
    ctr: &mut Counter,
) {
    prefixed_extern! {
        fn aes_gcm_dec_update_vaes_avx2(
            input: *const u8,
            output: *mut u8,
            len: c::size_t,
            key: &aes::AES_KEY,
            ivec: &mut Counter,
            Htable: &gcm::HTable,
            Xi: &mut gcm::Xi);
    }

    // Only whole blocks are processed here; the caller is responsible for
    // splitting off any partial trailing block beforehand.
    assert_eq!(in_out.len() % BLOCK_LEN, 0);
    // As above, the conversion fails only for more than `u32::MAX` blocks.
    let blocks = u32::try_from(in_out.len() / BLOCK_LEN).unwrap();

    if let Some(blocks) = NonZeroU32::new(blocks) {
        let aes_key = aes_key.inner_less_safe();
        let (htable, xi) = auth.inner();
        // SAFETY: `Overlapping` guarantees that `input` and `output` are
        // valid, possibly-overlapping views of the same buffer, each `len`
        // bytes long.
        in_out.with_input_output_len(|input, output, len| unsafe {
            aes_gcm_dec_update_vaes_avx2(input, output, len, aes_key, ctr, htable, xi)
        });
        // Advance the Rust-side counter past the blocks just processed.
        ctr.increment_by_less_safe(blocks);
    }
}
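
// Illustration (not part of the original module): both functions handle only
// whole 16-byte blocks: `seal_whole_vaes_clmul_avx2` encodes this in its
// `AsChunksMut<u8, BLOCK_LEN>` parameter, and `open_whole_vaes_clmul_avx2`
// asserts `len % BLOCK_LEN == 0`. A minimal standalone sketch of the split a
// caller would perform first, assuming a plain byte slice
// (`split_whole_blocks` is a hypothetical name):
//
//     const BLOCK_LEN: usize = 16;
//
//     /// Splits `in_out` into (whole blocks, partial trailing block).
//     fn split_whole_blocks(in_out: &mut [u8]) -> (&mut [u8], &mut [u8]) {
//         let whole_len = in_out.len() - (in_out.len() % BLOCK_LEN);
//         in_out.split_at_mut(whole_len)
//     }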