ring/aead/aes/

// Copyright 2018-2024 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

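// Vector-permutation ("vpaes") AES: a constant-time AES implementation built
// on SIMD byte-shuffle instructions, backed by the `vpaes_*` assembly
// routines. It requires NEON on Arm targets and SSSE3 on x86/x86_64 (see
// `RequiredCpuFeatures` below).
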
#![cfg(any(
    all(target_arch = "aarch64", target_endian = "little"),
    all(target_arch = "arm", target_endian = "little"),
    target_arch = "x86",
    target_arch = "x86_64"
))]

use super::{Block, Counter, EncryptBlock, EncryptCtr32, Iv, KeyBytes, Overlapping, AES_KEY};
use crate::{cpu, error};

#[cfg(any(
    all(target_arch = "aarch64", target_endian = "little"),
    all(target_arch = "arm", target_endian = "little")
))]
type RequiredCpuFeatures = cpu::arm::Neon;

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
pub(in super::super) type RequiredCpuFeatures = cpu::intel::Ssse3;

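// Wraps the AES key schedule expanded by `vpaes_set_encrypt_key`.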
#[derive(Clone)]
pub(in super::super) struct Key {
    inner: AES_KEY,
}

impl Key {
    pub(in super::super) fn new(
        bytes: KeyBytes<'_>,
        _cpu: RequiredCpuFeatures,
    ) -> Result<Self, error::Unspecified> {
        let inner = unsafe { set_encrypt_key!(vpaes_set_encrypt_key, bytes) }?;
        Ok(Self { inner })
    }
}

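// On aarch64, arm, and x86_64, single-block encryption is implemented in
// terms of the CTR32 code path below; 32-bit x86 instead calls
// `vpaes_encrypt` directly (see the `target_arch = "x86"` impls at the end
// of this file).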
#[cfg(any(
    all(target_arch = "aarch64", target_endian = "little"),
    all(target_arch = "arm", target_endian = "little"),
    target_arch = "x86_64"
))]
impl EncryptBlock for Key {
    fn encrypt_block(&self, block: Block) -> Block {
        super::encrypt_block_using_encrypt_iv_xor_block(self, block)
    }

    fn encrypt_iv_xor_block(&self, iv: Iv, block: Block) -> Block {
        super::encrypt_iv_xor_block_using_ctr32(self, iv, block)
    }
}

#[cfg(any(
    all(target_arch = "aarch64", target_endian = "little"),
    target_arch = "x86_64"
))]
impl EncryptCtr32 for Key {
    fn ctr32_encrypt_within(&self, in_out: Overlapping<'_>, ctr: &mut Counter) {
        unsafe { ctr32_encrypt_blocks!(vpaes_ctr32_encrypt_blocks, in_out, &self.inner, ctr) }
    }
}

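// On 32-bit Arm, large inputs are split so that the bit-sliced implementation
// (`bs`, "bsaes") processes batches of 8 blocks and vpaes handles whatever
// remains.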
#[cfg(all(target_arch = "arm", target_endian = "little"))]
impl EncryptCtr32 for Key {
    fn ctr32_encrypt_within(&self, in_out: Overlapping<'_>, ctr: &mut Counter) {
        use super::{super::overlapping::IndexError, bs, BLOCK_LEN};

        let in_out = {
            let (in_out, src) = in_out.into_slice_src_mut();
            let blocks = in_out[src.clone()].len() / BLOCK_LEN;

            // bsaes operates in batches of 8 blocks.
            let bsaes_blocks = if blocks >= 8 && (blocks % 8) < 6 {
                // It's faster to use bsaes for all the full batches and then
                // switch to vpaes for the last partial batch (if any).
                blocks - (blocks % 8)
            } else if blocks >= 8 {
                // It's faster to let bsaes handle everything including
                // the last partial batch.
                blocks
            } else {
                // It's faster to let vpaes handle everything.
                0
            };
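            // For example: 20 blocks => bsaes does 16, vpaes does 4;
            // 15 blocks (15 % 8 == 7) => bsaes does all 15;
            // 5 blocks => vpaes does all 5.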
            let bsaes_in_out_len = bsaes_blocks * BLOCK_LEN;
            let bs_in_out =
                Overlapping::new(&mut in_out[..(src.start + bsaes_in_out_len)], src.clone())
                    .unwrap_or_else(|IndexError { .. }| unreachable!());

            // SAFETY:
            //  * self.inner was initialized with `vpaes_set_encrypt_key` above,
            //    as required by `bsaes_ctr32_encrypt_blocks_with_vpaes_key`.
            unsafe {
                bs::ctr32_encrypt_blocks_with_vpaes_key(bs_in_out, &self.inner, ctr);
            }

            Overlapping::new(&mut in_out[bsaes_in_out_len..], src)
                .unwrap_or_else(|IndexError { .. }| unreachable!())
        };

        // SAFETY:
        //  * self.inner was initialized with `vpaes_set_encrypt_key` above,
        //    as required by `vpaes_ctr32_encrypt_blocks`.
        //  * `vpaes_ctr32_encrypt_blocks` satisfies the contract for
        //    `ctr32_encrypt_blocks`.
        unsafe { ctr32_encrypt_blocks!(vpaes_ctr32_encrypt_blocks, in_out, &self.inner, ctr) }
    }
}

#[cfg(target_arch = "x86")]
impl EncryptBlock for Key {
    fn encrypt_block(&self, block: Block) -> Block {
        unsafe { encrypt_block!(vpaes_encrypt, block, &self.inner) }
    }

    fn encrypt_iv_xor_block(&self, iv: Iv, block: Block) -> Block {
        super::encrypt_iv_xor_block_using_encrypt_block(self, iv, block)
    }
}

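// On 32-bit x86, CTR mode is done one block at a time: each counter value is
// encrypted with `vpaes_encrypt` and XORed with the corresponding input block
// via `encrypt_iv_xor_block`, while `shift_full_blocks` handles the
// overlapping input/output buffer.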
#[cfg(target_arch = "x86")]
impl EncryptCtr32 for Key {
    fn ctr32_encrypt_within(&self, in_out: Overlapping<'_>, ctr: &mut Counter) {
        super::super::shift::shift_full_blocks(in_out, |input| {
            self.encrypt_iv_xor_block(ctr.increment(), *input)
        });
    }
}