1// Copyright 2015-2019 Brian Smith.
2//
3// Permission to use, copy, modify, and/or distribute this software for any
4// purpose with or without fee is hereby granted, provided that the above
5// copyright notice and this permission notice appear in all copies.
6//
7// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
10// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
12// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
13// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
1415use crate::{
16 arithmetic::limbs_from_hex,
17 digest, error, limb,
18 polyfill::slice::{self, AsChunks},
19};
20use core::array;
/// A Curve25519 scalar, stored as its 32-byte little-endian encoding.
/// `#[repr(transparent)]` guarantees the same memory layout as
/// `[u8; SCALAR_LEN]`, so values can be handed to the C routines used below.
#[repr(transparent)]
pub struct Scalar([u8; SCALAR_LEN]);
2425pub const SCALAR_LEN: usize = 32;
impl Scalar {
    // Constructs a `Scalar` from `bytes`, failing if `bytes` encodes a scalar
    // that is not in the range [0, n).
    pub fn from_bytes_checked(bytes: [u8; SCALAR_LEN]) -> Result<Self, error::Unspecified> {
        // n, the order of the curve25519 base-point group, as little-endian
        // limbs: n = 2^252 + 0x14def9dea2f79cd65812631a5cf5d3ed.
        const ORDER: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            limbs_from_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed");
        let order = ORDER.map(limb::Limb::from);

        // Reinterpret the 32 bytes as little-endian limbs. SCALAR_LEN is a
        // multiple of LIMB_BYTES, so the remainder chunk is always empty.
        let (limbs_as_bytes, _empty): (AsChunks<u8, { limb::LIMB_BYTES }>, _) =
            slice::as_chunks(&bytes);
        debug_assert!(_empty.is_empty());
        let limbs: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            array::from_fn(|i| limb::Limb::from_le_bytes(limbs_as_bytes[i]));
        // Reject any value >= n. As the name says, this comparison leaks only
        // the single pass/fail bit, not the scalar's value.
        limb::verify_limbs_less_than_limbs_leak_bit(&limbs, &order)?;

        Ok(Self(bytes))
    }

    // Constructs a `Scalar` from `digest` reduced modulo n.
    pub fn from_sha512_digest_reduced(digest: digest::Digest) -> Self {
        prefixed_extern! {
            // Reduces the 64-byte value in `s` modulo n, in place; the code
            // below reads the result from the first SCALAR_LEN bytes.
            fn x25519_sc_reduce(s: &mut UnreducedScalar);
        }
        // SHA512_OUTPUT_LEN must equal UNREDUCED_SCALAR_LEN (64) for this
        // array to typecheck as `&mut UnreducedScalar` below.
        let mut unreduced = [0u8; digest::SHA512_OUTPUT_LEN];
        unreduced.copy_from_slice(digest.as_ref());
        // SAFETY: `unreduced` is exactly the `[u8; UNREDUCED_SCALAR_LEN]`
        // buffer `x25519_sc_reduce` requires, and it is exclusively borrowed.
        unsafe { x25519_sc_reduce(&mut unreduced) };
        // The reduced result fits in the first SCALAR_LEN bytes; the slice
        // length makes `try_into` infallible.
        Self((&unreduced[..SCALAR_LEN]).try_into().unwrap())
    }
}
/// A scalar that has had the X25519 mask applied (see
/// `MaskedScalar::from_bytes_masked`). Same layout as `[u8; SCALAR_LEN]`
/// via `#[repr(transparent)]`.
#[repr(transparent)]
pub struct MaskedScalar([u8; SCALAR_LEN]);
impl MaskedScalar {
    // Constructs a `MaskedScalar` by masking `bytes` in place via the
    // external `x25519_sc_mask` routine (presumably the RFC 7748 "clamping"
    // of the low three bits and top two bits — confirm against the C/asm
    // implementation).
    pub fn from_bytes_masked(bytes: [u8; SCALAR_LEN]) -> Self {
        prefixed_extern! {
            fn x25519_sc_mask(a: &mut [u8; SCALAR_LEN]);
        }
        let mut r = Self(bytes);
        // SAFETY: `r.0` is exactly the `[u8; SCALAR_LEN]` array the external
        // function expects, exclusively borrowed for the in-place update.
        unsafe { x25519_sc_mask(&mut r.0) };
        r
    }
}
7071impl From<MaskedScalar> for Scalar {
72fn from(MaskedScalar(scalar): MaskedScalar) -> Self {
73Self(scalar)
74 }
75}
// A 64-byte (512-bit) value awaiting reduction modulo n, e.g. a SHA-512
// digest; see `Scalar::from_sha512_digest_reduced`.
type UnreducedScalar = [u8; UNREDUCED_SCALAR_LEN];
const UNREDUCED_SCALAR_LEN: usize = SCALAR_LEN * 2;