ring/ec/curve25519/scalar.rs

// Copyright 2015-2019 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

use crate::{
    arithmetic::limbs_from_hex,
    digest, error, limb,
    polyfill::slice::{self, AsChunks},
};
use core::array;

#[repr(transparent)]
pub struct Scalar([u8; SCALAR_LEN]);

pub const SCALAR_LEN: usize = 32;

impl Scalar {
    // Constructs a `Scalar` from `bytes`, failing if `bytes` encodes a scalar
    // that is not in the range [0, n).
    pub fn from_bytes_checked(bytes: [u8; SCALAR_LEN]) -> Result<Self, error::Unspecified> {
        // n = 2^252 + 27742317777372353535851937790883648493, the order of the
        // prime-order subgroup of Curve25519.
        const ORDER: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            limbs_from_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed");
        let order = ORDER.map(limb::Limb::from);

        // Reinterpret the little-endian bytes as limbs so they can be compared
        // against `order` limb-by-limb.
        let (limbs_as_bytes, _empty): (AsChunks<u8, { limb::LIMB_BYTES }>, _) =
            slice::as_chunks(&bytes);
        debug_assert!(_empty.is_empty());
        let limbs: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            array::from_fn(|i| limb::Limb::from_le_bytes(limbs_as_bytes[i]));
        limb::verify_limbs_less_than_limbs_leak_bit(&limbs, &order)?;

        Ok(Self(bytes))
    }

    // Constructs a `Scalar` from `digest` reduced modulo n.
    pub fn from_sha512_digest_reduced(digest: digest::Digest) -> Self {
        prefixed_extern! {
            fn x25519_sc_reduce(s: &mut UnreducedScalar);
        }
        // `digest` must be a SHA-512 digest; `copy_from_slice` panics on any
        // other length.
        let mut unreduced = [0u8; digest::SHA512_OUTPUT_LEN];
        unreduced.copy_from_slice(digest.as_ref());
        // The 64-byte little-endian value is reduced modulo n in place; the
        // reduced result occupies the first `SCALAR_LEN` bytes.
        unsafe { x25519_sc_reduce(&mut unreduced) };
        Self((&unreduced[..SCALAR_LEN]).try_into().unwrap())
    }
}
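
// Context sketch (not from this file): in Ed25519 (RFC 8032), both the
// per-signature nonce `r = SHA-512(prefix || M) mod n` and the challenge
// `SHA-512(R || A || M) mod n` are SHA-512 digests reduced modulo n, so a
// caller would do roughly the following (`hash_input` is hypothetical; the
// real callers live elsewhere in the crate):
//
//     let h = digest::digest(&digest::SHA512, &hash_input);
//     let scalar = Scalar::from_sha512_digest_reduced(h);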

#[repr(transparent)]
pub struct MaskedScalar([u8; SCALAR_LEN]);

impl MaskedScalar {
    pub fn from_bytes_masked(bytes: [u8; SCALAR_LEN]) -> Self {
        prefixed_extern! {
            fn x25519_sc_mask(a: &mut [u8; SCALAR_LEN]);
        }
        let mut r = Self(bytes);
        unsafe { x25519_sc_mask(&mut r.0) };
        r
    }
}
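
// Sketch (assumption, not taken from this file): `x25519_sc_mask` is expected
// to apply the standard X25519/Ed25519 "clamping" to the 32 little-endian
// bytes, which in plain Rust would look like:
//
//     bytes[0] &= 0b1111_1000;  // clear the low 3 bits (cofactor clearing)
//     bytes[31] &= 0b0111_1111; // clear bit 255
//     bytes[31] |= 0b0100_0000; // set bit 254
//
// so a `MaskedScalar` is always a multiple of 8 with its high bit fixed.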

impl From<MaskedScalar> for Scalar {
    fn from(MaskedScalar(scalar): MaskedScalar) -> Self {
        Self(scalar)
    }
}

// A SHA-512 digest interpreted as an unreduced scalar; `UNREDUCED_SCALAR_LEN`
// equals `digest::SHA512_OUTPUT_LEN` (64 bytes).
type UnreducedScalar = [u8; UNREDUCED_SCALAR_LEN];
const UNREDUCED_SCALAR_LEN: usize = SCALAR_LEN * 2;
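
// A minimal test sketch (not part of the original file), assuming the crate's
// usual `#[cfg(test)]` conventions: zero is accepted (it lies in [0, n)),
// while the little-endian encoding of n itself is rejected.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn from_bytes_checked_bounds() {
        // Zero is a valid (if degenerate) scalar.
        assert!(Scalar::from_bytes_checked([0u8; SCALAR_LEN]).is_ok());

        // n = 2^252 + 27742317777372353535851937790883648493, little-endian.
        let n: [u8; SCALAR_LEN] = [
            0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9,
            0xde, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x10,
        ];
        assert!(Scalar::from_bytes_checked(n).is_err());
    }
}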