ark_bls12_381/curves/util.rs

use ark_ec::{short_weierstrass::Affine, AffineRepr};
use ark_ff::{BigInteger384, PrimeField};
use ark_serialize::SerializationError;

use crate::{g1::Config as G1Config, g2::Config as G2Config, Fq, Fq2, G1Affine, G2Affine};

pub const G1_SERIALIZED_SIZE: usize = 48;
pub const G2_SERIALIZED_SIZE: usize = 96;

pub struct EncodingFlags {
    pub is_compressed: bool,
    pub is_infinity: bool,
    pub is_lexographically_largest: bool,
}

impl EncodingFlags {
    /// Fetches the flags from the byte-string
    pub fn get_flags(bytes: &[u8]) -> Result<Self, SerializationError> {
        let compression_flag_set = (bytes[0] >> 7) & 1;
        let infinity_flag_set = (bytes[0] >> 6) & 1;
        let sort_flag_set = (bytes[0] >> 5) & 1;

        let is_compressed = compression_flag_set == 1;
        let is_infinity = infinity_flag_set == 1;
        let is_lexographically_largest = sort_flag_set == 1;

        if is_lexographically_largest && (!is_compressed || is_infinity) {
            return Err(SerializationError::InvalidData);
        }

        Ok(Self {
            is_compressed,
            is_infinity,
            is_lexographically_largest,
        })
    }

    /// Encodes the flags into the byte-string
    pub fn encode_flags(&self, bytes: &mut [u8]) {
        if self.is_compressed {
            bytes[0] |= 1 << 7;
        }

        if self.is_infinity {
            bytes[0] |= 1 << 6;
        }

        if self.is_compressed && !self.is_infinity && self.is_lexographically_largest {
            bytes[0] |= 1 << 5;
        }
    }

    /// Removes the flags from the byte-string.
    ///
    /// This reverses the effects of `encode_flags`.
    pub fn remove_flags(bytes: &mut [u8]) {
        bytes[0] &= 0b0001_1111;
    }
}
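
// Illustrative sketch (not part of the upstream file): the three flags occupy the
// top three bits of the leading byte, so a compressed, non-infinity point whose
// y-coordinate is lexicographically largest carries `0b1010_0000` in `bytes[0]`.
// A minimal round-trip check under that assumption:
#[cfg(test)]
mod encoding_flags_example {
    use super::{EncodingFlags, G1_SERIALIZED_SIZE};

    #[test]
    fn flags_round_trip() {
        let flags = EncodingFlags {
            is_compressed: true,
            is_infinity: false,
            is_lexographically_largest: true,
        };
        let mut bytes = [0u8; G1_SERIALIZED_SIZE];
        flags.encode_flags(&mut bytes);
        assert_eq!(bytes[0], 0b1010_0000);

        let decoded = EncodingFlags::get_flags(&bytes).unwrap();
        assert!(decoded.is_compressed && !decoded.is_infinity && decoded.is_lexographically_largest);

        // Stripping the flags restores an all-zero buffer.
        EncodingFlags::remove_flags(&mut bytes);
        assert_eq!(bytes, [0u8; G1_SERIALIZED_SIZE]);
    }
}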

pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
    let mut tmp = BigInteger384::new([0, 0, 0, 0, 0, 0]);

    // Note: the following `unwrap`s only guard the conversion of each byte slice
    // into a `[u8; 8]`; this is infallible because the indices are fixed at
    // compile time and `bytes` has a fixed size.
    tmp.0[5] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[0..8]).unwrap());
    tmp.0[4] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[8..16]).unwrap());
    tmp.0[3] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[16..24]).unwrap());
    tmp.0[2] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[24..32]).unwrap());
    tmp.0[1] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[32..40]).unwrap());
    tmp.0[0] = u64::from_be_bytes(<[u8; 8]>::try_from(&bytes[40..48]).unwrap());

    Fq::from_bigint(tmp)
}

pub(crate) fn serialize_fq(field: Fq) -> [u8; 48] {
    let mut result = [0u8; 48];

    let rep = field.into_bigint();

    result[0..8].copy_from_slice(&rep.0[5].to_be_bytes());
    result[8..16].copy_from_slice(&rep.0[4].to_be_bytes());
    result[16..24].copy_from_slice(&rep.0[3].to_be_bytes());
    result[24..32].copy_from_slice(&rep.0[2].to_be_bytes());
    result[32..40].copy_from_slice(&rep.0[1].to_be_bytes());
    result[40..48].copy_from_slice(&rep.0[0].to_be_bytes());

    result
}
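
// Illustrative sketch (not part of the upstream file): `serialize_fq` writes the
// six 64-bit limbs in big-endian order and `deserialize_fq` reverses it, so a
// round trip through 48 bytes returns the original element. `Fq::from(7u64)` is
// just an arbitrary value for the check.
#[cfg(test)]
mod fq_bytes_example {
    use super::{deserialize_fq, serialize_fq};
    use crate::Fq;

    #[test]
    fn fq_round_trip() {
        let a = Fq::from(7u64);
        let bytes = serialize_fq(a);
        assert_eq!(deserialize_fq(bytes), Some(a));

        // A value at or above the modulus is rejected by `Fq::from_bigint`.
        assert_eq!(deserialize_fq([0xff; 48]), None);
    }
}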

fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SERIALIZED_SIZE] {
    let mut tmp = [0; G1_SERIALIZED_SIZE];
    // read `G1_SERIALIZED_SIZE` bytes
    tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);

    if mask {
        EncodingFlags::remove_flags(&mut tmp);
    }
    tmp
}
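
// Illustrative sketch (not part of the upstream file): `read_bytes_with_offset`
// slices the `offset`-th 48-byte chunk out of a larger buffer and, when `mask`
// is set, clears the three flag bits of that chunk's leading byte.
#[cfg(test)]
mod read_bytes_with_offset_example {
    use super::{read_bytes_with_offset, G1_SERIALIZED_SIZE};

    #[test]
    fn chunking_and_masking() {
        let mut buf = [0u8; 2 * G1_SERIALIZED_SIZE];
        buf[0] = 0b1010_0001;
        buf[G1_SERIALIZED_SIZE] = 0xff;

        // First chunk with masking: the top three flag bits are cleared.
        assert_eq!(read_bytes_with_offset(&buf, 0, true)[0], 0b0000_0001);
        // Second chunk without masking: bytes are returned verbatim.
        assert_eq!(read_bytes_with_offset(&buf, 1, false)[0], 0xff);
    }
}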

pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G1Config>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; G1_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes[..])?;

    // We expect to be deserializing a compressed point
    if !flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    // Attempt to obtain the x-coordinate
    let x_bytes = read_bytes_with_offset(&bytes, 0, true);

    if flags.is_infinity {
        // Check that the `x` coordinate was `0`
        if x_bytes != [0u8; 48] {
            return Err(SerializationError::InvalidData);
        }

        return Ok(G1Affine::zero());
    }

    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
    let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
        .ok_or(SerializationError::InvalidData)?;

    Ok(p)
}
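
// Illustrative sketch (not part of the upstream file), assuming the crate's
// `CanonicalSerialize` impl for `G1Affine` emits the same flag-prefixed,
// big-endian layout that `read_g1_compressed` parses: compress the generator
// and read it back.
#[cfg(test)]
mod read_g1_compressed_example {
    use super::read_g1_compressed;
    use crate::G1Affine;
    use ark_ec::AffineRepr;
    use ark_serialize::CanonicalSerialize;
    use ark_std::vec::Vec;

    #[test]
    fn g1_compressed_round_trip() {
        let g = G1Affine::generator();
        let mut buf = Vec::new();
        g.serialize_compressed(&mut buf).unwrap();
        assert_eq!(read_g1_compressed(&buf[..]).unwrap(), g);
    }
}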

pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G1Config>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; 2 * G1_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes[..])?;

    // We expect to be deserializing an uncompressed point
    if flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    let x_bytes = read_bytes_with_offset(&bytes, 0, true);
    let y_bytes = read_bytes_with_offset(&bytes, 1, false);

    if flags.is_infinity {
        if x_bytes != [0u8; 48] || y_bytes != [0u8; 48] {
            return Err(SerializationError::InvalidData);
        }
        return Ok(G1Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
    // Attempt to obtain the y-coordinate
    let y = deserialize_fq(y_bytes).ok_or(SerializationError::InvalidData)?;
    let p = G1Affine::new_unchecked(x, y);

    Ok(p)
}
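
// Illustrative sketch (not part of the upstream file), under the same layout
// assumption as above: an uncompressed G1 point is x || y (96 bytes), and the
// identity is encoded as the infinity flag over an all-zero payload.
#[cfg(test)]
mod read_g1_uncompressed_example {
    use super::read_g1_uncompressed;
    use crate::G1Affine;
    use ark_ec::AffineRepr;
    use ark_serialize::CanonicalSerialize;
    use ark_std::vec::Vec;

    #[test]
    fn g1_uncompressed_round_trip() {
        let g = G1Affine::generator();
        let mut buf = Vec::new();
        g.serialize_uncompressed(&mut buf).unwrap();
        assert_eq!(read_g1_uncompressed(&buf[..]).unwrap(), g);

        // The identity round-trips to the point at infinity.
        let mut inf = Vec::new();
        G1Affine::zero().serialize_uncompressed(&mut inf).unwrap();
        assert_eq!(read_g1_uncompressed(&inf[..]).unwrap(), G1Affine::zero());
    }
}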

pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G2Config>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; G2_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes)?;

    // We expect to be deserializing a compressed point
    if !flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);

    if flags.is_infinity {
        if xc1_bytes != [0u8; 48] || xc0_bytes != [0u8; 48] {
            return Err(SerializationError::InvalidData);
        }
        return Ok(G2Affine::zero());
    }

    // Attempt to obtain the x-coordinate
    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
    let x = Fq2::new(xc0, xc1);

    let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
        .ok_or(SerializationError::InvalidData)?;

    Ok(p)
}
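
// Illustrative sketch (not part of the upstream file): in this layout the G2
// x-coordinate is written with its `c1` component first, then `c0`, each as a
// 48-byte big-endian `Fq`. Assuming the crate's serializer uses the same
// ordering, a compressed round trip of the generator recovers the point.
#[cfg(test)]
mod read_g2_compressed_example {
    use super::read_g2_compressed;
    use crate::G2Affine;
    use ark_ec::AffineRepr;
    use ark_serialize::CanonicalSerialize;
    use ark_std::vec::Vec;

    #[test]
    fn g2_compressed_round_trip() {
        let g = G2Affine::generator();
        let mut buf = Vec::new();
        g.serialize_compressed(&mut buf).unwrap();
        assert_eq!(read_g2_compressed(&buf[..]).unwrap(), g);
    }
}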

pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
    mut reader: R,
) -> Result<Affine<G2Config>, ark_serialize::SerializationError> {
    let mut bytes = [0u8; 2 * G2_SERIALIZED_SIZE];
    reader
        .read_exact(&mut bytes)
        .map_err(|_| SerializationError::InvalidData)?;

    // Obtain the three flags from the start of the byte sequence
    let flags = EncodingFlags::get_flags(&bytes)?;

    // We expect to be deserializing an uncompressed point
    if flags.is_compressed {
        return Err(SerializationError::UnexpectedFlags);
    }

    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);

    let yc1_bytes = read_bytes_with_offset(&bytes, 2, false);
    let yc0_bytes = read_bytes_with_offset(&bytes, 3, false);

    if flags.is_infinity {
        if xc1_bytes != [0u8; 48]
            || xc0_bytes != [0u8; 48]
            || yc1_bytes != [0u8; 48]
            || yc0_bytes != [0u8; 48]
        {
            return Err(SerializationError::InvalidData);
        }
        return Ok(G2Affine::zero());
    }

    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
    let yc1 = deserialize_fq(yc1_bytes).ok_or(SerializationError::InvalidData)?;
    let yc0 = deserialize_fq(yc0_bytes).ok_or(SerializationError::InvalidData)?;

    // Construct the x-coordinate from its components
    let x = Fq2::new(xc0, xc1);

    // Construct the y-coordinate from its components
    let y = Fq2::new(yc0, yc1);

    let p = G2Affine::new_unchecked(x, y);

    Ok(p)
}
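
// Illustrative sketch (not part of the upstream file), under the same layout
// assumption: an uncompressed G2 point is x.c1 || x.c0 || y.c1 || y.c0
// (192 bytes), with the identity signalled by the infinity flag over zeros.
#[cfg(test)]
mod read_g2_uncompressed_example {
    use super::read_g2_uncompressed;
    use crate::G2Affine;
    use ark_ec::AffineRepr;
    use ark_serialize::CanonicalSerialize;
    use ark_std::vec::Vec;

    #[test]
    fn g2_uncompressed_round_trip() {
        let g = G2Affine::generator();
        let mut buf = Vec::new();
        g.serialize_uncompressed(&mut buf).unwrap();
        assert_eq!(read_g2_uncompressed(&buf[..]).unwrap(), g);

        // The identity round-trips to the point at infinity.
        let mut inf = Vec::new();
        G2Affine::zero().serialize_uncompressed(&mut inf).unwrap();
        assert_eq!(read_g2_uncompressed(&inf[..]).unwrap(), G2Affine::zero());
    }
}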