// ark_bls12_381/curves/g1.rs

1use ark_ec::{
2    bls12,
3    bls12::Bls12Config,
4    hashing::curve_maps::wb::{IsogenyMap, WBConfig},
5    models::CurveConfig,
6    scalar_mul::glv::GLVConfig,
7    short_weierstrass::{Affine, SWCurveConfig},
8    AffineRepr, PrimeGroup,
9};
10use ark_ff::{AdditiveGroup, BigInt, MontFp, PrimeField, Zero};
11use ark_serialize::{Compress, SerializationError};
12use ark_std::{ops::Neg, One};
13
14use super::g1_swu_iso;
15use crate::{
16    util::{
17        read_g1_compressed, read_g1_uncompressed, serialize_fq, EncodingFlags, G1_SERIALIZED_SIZE,
18    },
19    Fq, Fr,
20};
21
/// Affine representation of a BLS12-381 G1 group element.
pub type G1Affine = bls12::G1Affine<crate::Config>;
/// Projective (Jacobian) representation of a BLS12-381 G1 group element.
pub type G1Projective = bls12::G1Projective<crate::Config>;
24
/// Curve configuration for the BLS12-381 G1 group
/// (short Weierstrass curve y^2 = x^3 + 4 over Fq).
#[derive(Clone, Default, PartialEq, Eq)]
pub struct Config;
27
// Base/scalar fields and cofactor data for G1.
impl CurveConfig for Config {
    type BaseField = Fq;
    type ScalarField = Fr;

    /// COFACTOR = (x - 1)^2 / 3  = 76329603384216526031706109802092473003
    /// (limbs in little-endian order, per the arkworks convention).
    const COFACTOR: &'static [u64] = &[0x8c00aaab0000aaab, 0x396c8c005555e156];

    /// COFACTOR_INV = COFACTOR^{-1} mod r
    /// = 52435875175126190458656871551744051925719901746859129887267498875565241663483
    const COFACTOR_INV: Fr =
        MontFp!("52435875175126190458656871551744051925719901746859129887267498875565241663483");
}
40
41impl SWCurveConfig for Config {
42    /// COEFF_A = 0
43    const COEFF_A: Fq = Fq::ZERO;
44
45    /// COEFF_B = 4
46    const COEFF_B: Fq = MontFp!("4");
47
48    /// AFFINE_GENERATOR_COEFFS = (G1_GENERATOR_X, G1_GENERATOR_Y)
49    const GENERATOR: G1Affine = G1Affine::new_unchecked(G1_GENERATOR_X, G1_GENERATOR_Y);
50
51    #[inline(always)]
52    fn mul_by_a(_: Self::BaseField) -> Self::BaseField {
53        Self::BaseField::zero()
54    }
55
56    #[inline]
57    fn mul_projective(p: &G1Projective, scalar: &[u64]) -> G1Projective {
58        let s = Self::ScalarField::from_sign_and_limbs(true, scalar);
59        GLVConfig::glv_mul_projective(*p, s)
60    }
61
62    #[inline]
63    fn is_in_correct_subgroup_assuming_on_curve(p: &G1Affine) -> bool {
64        // Algorithm from Section 6 of https://eprint.iacr.org/2021/1130.
65        //
66        // Check that endomorphism_p(P) == -[X^2]P
67
68        // An early-out optimization described in Section 6.
69        // If uP == P but P != point of infinity, then the point is not in the right
70        // subgroup.
71        let x_times_p = p.mul_bigint(crate::Config::X);
72        if x_times_p.eq(p) && !p.infinity {
73            return false;
74        }
75
76        let minus_x_squared_times_p = x_times_p.mul_bigint(crate::Config::X).neg();
77        let endomorphism_p = endomorphism(p);
78        minus_x_squared_times_p.eq(&endomorphism_p)
79    }
80
81    #[inline]
82    fn clear_cofactor(p: &G1Affine) -> G1Affine {
83        // Using the effective cofactor, as explained in
84        // Section 5 of https://eprint.iacr.org/2019/403.pdf.
85        //
86        // It is enough to multiply by (1 - x), instead of (x - 1)^2 / 3
87        let h_eff = one_minus_x().into_bigint();
88        Config::mul_affine(&p, h_eff.as_ref()).into()
89    }
90
91    fn deserialize_with_mode<R: ark_serialize::Read>(
92        mut reader: R,
93        compress: ark_serialize::Compress,
94        validate: ark_serialize::Validate,
95    ) -> Result<Affine<Self>, ark_serialize::SerializationError> {
96        let p = if compress == ark_serialize::Compress::Yes {
97            read_g1_compressed(&mut reader)?
98        } else {
99            read_g1_uncompressed(&mut reader)?
100        };
101
102        if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
103        {
104            return Err(SerializationError::InvalidData);
105        }
106        Ok(p)
107    }
108
109    fn serialize_with_mode<W: ark_serialize::Write>(
110        item: &Affine<Self>,
111        mut writer: W,
112        compress: ark_serialize::Compress,
113    ) -> Result<(), SerializationError> {
114        let encoding = EncodingFlags {
115            is_compressed: compress == ark_serialize::Compress::Yes,
116            is_infinity: item.is_zero(),
117            is_lexographically_largest: item.y > -item.y,
118        };
119        let mut p = *item;
120        if encoding.is_infinity {
121            p = G1Affine::zero();
122        }
123        // need to access the field struct `x` directly, otherwise we get None from xy()
124        // method
125        let x_bytes = serialize_fq(p.x);
126        if encoding.is_compressed {
127            let mut bytes: [u8; G1_SERIALIZED_SIZE] = x_bytes;
128
129            encoding.encode_flags(&mut bytes);
130            writer.write_all(&bytes)?;
131        } else {
132            let mut bytes = [0u8; 2 * G1_SERIALIZED_SIZE];
133            bytes[0..G1_SERIALIZED_SIZE].copy_from_slice(&x_bytes[..]);
134            bytes[G1_SERIALIZED_SIZE..].copy_from_slice(&serialize_fq(p.y)[..]);
135
136            encoding.encode_flags(&mut bytes);
137            writer.write_all(&bytes)?;
138        };
139
140        Ok(())
141    }
142
143    fn serialized_size(compress: Compress) -> usize {
144        if compress == Compress::Yes {
145            G1_SERIALIZED_SIZE
146        } else {
147            G1_SERIALIZED_SIZE * 2
148        }
149    }
150}
151
152impl GLVConfig for Config {
153    const ENDO_COEFFS: &'static[Self::BaseField] = &[
154        MontFp!("793479390729215512621379701633421447060886740281060493010456487427281649075476305620758731620350")
155    ];
156
157    const LAMBDA: Self::ScalarField =
158        MontFp!("52435875175126190479447740508185965837461563690374988244538805122978187051009");
159
160    const SCALAR_DECOMP_COEFFS: [(bool, <Self::ScalarField as PrimeField>::BigInt); 4] = [
161        (true, BigInt!("228988810152649578064853576960394133504")),
162        (true, BigInt!("1")),
163        (false, BigInt!("1")),
164        (true, BigInt!("228988810152649578064853576960394133503")),
165    ];
166
167    fn endomorphism(p: &G1Projective) -> G1Projective {
168        let mut res = (*p).clone();
169        res.x *= Self::ENDO_COEFFS[0];
170        res
171    }
172
173    fn endomorphism_affine(p: &Affine<Self>) -> Affine<Self> {
174        let mut res = (*p).clone();
175        res.x *= Self::ENDO_COEFFS[0];
176        res
177    }
178}
179
180fn one_minus_x() -> Fr {
181    const X: Fr = Fr::from_sign_and_limbs(!crate::Config::X_IS_NEGATIVE, crate::Config::X);
182    Fr::one() - X
183}
184
// Parameters from the [IETF draft v16, section E.2](https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html#name-11-isogeny-map-for-bls12-381).
// Wires the simplified-SWU map on the isogenous curve into hash-to-curve for G1.
impl WBConfig for Config {
    // The 11-isogenous curve used as the SWU target.
    type IsogenousCurve = g1_swu_iso::SwuIsoConfig;

    // Precomputed 11-isogeny mapping points of `SwuIsoConfig` onto this curve.
    const ISOGENY_MAP: IsogenyMap<'static, Self::IsogenousCurve, Self> =
        g1_swu_iso::ISOGENY_MAP_TO_G1;
}
192
/// G1_GENERATOR_X =
/// 3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507
pub const G1_GENERATOR_X: Fq = MontFp!("3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507");

/// G1_GENERATOR_Y =
/// 1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569
pub const G1_GENERATOR_Y: Fq = MontFp!("1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569");

/// BETA is a non-trivial cubic root of unity in Fq.
/// Used by [`endomorphism`] to map (x, y) -> (BETA * x, y).
pub const BETA: Fq = MontFp!("793479390729215512621379701633421447060886740281060493010456487427281649075476305620758731620350");
203
204pub fn endomorphism(p: &Affine<Config>) -> Affine<Config> {
205    // Endomorphism of the points on the curve.
206    // endomorphism_p(x,y) = (BETA * x, y)
207    // where BETA is a non-trivial cubic root of unity in Fq.
208    let mut res = (*p).clone();
209    res.x *= BETA;
210    res
211}
212
#[cfg(test)]
mod test {

    use super::*;
    use crate::g1;
    use ark_serialize::CanonicalDeserialize;
    use ark_std::{rand::Rng, UniformRand};

    // Samples a point on the curve — but NOT necessarily in the prime-order
    // subgroup — by trying random x-coordinates until one has a square y^2.
    fn sample_unchecked() -> Affine<g1::Config> {
        let mut rng = ark_std::test_rng();
        loop {
            let x = Fq::rand(&mut rng);
            let greatest = rng.gen();

            if let Some(p) = Affine::get_point_from_x_unchecked(x, greatest) {
                return p;
            }
        }
    }

    // `clear_cofactor` must always map an on-curve point into the
    // prime-order subgroup.
    #[test]
    fn test_cofactor_clearing() {
        const SAMPLES: usize = 100;
        for _ in 0..SAMPLES {
            let p: Affine<g1::Config> = sample_unchecked();
            let p = p.clear_cofactor();
            assert!(p.is_on_curve());
            assert!(p.is_in_correct_subgroup_assuming_on_curve());
        }
    }

    // Identity points with nonzero (non-canonical) coordinate bytes must be
    // rejected, in both compressed and uncompressed form.
    #[test]
    fn non_canonical_identity_point() {
        // Infinity flag set, but the x-coordinate bytes are not all zero.
        let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err());

        // Infinity flag set, but the y-coordinate bytes are not all zero.
        let non_canonical_hex_uncompressed = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001";
        let non_canonical_bytes = hex::decode(non_canonical_hex_uncompressed).unwrap();
        assert_eq!(non_canonical_bytes.len(), 96);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_uncompressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err())
    }

    // Deserialization must reject flag-bit combinations that the format
    // defines as meaningless.
    #[test]
    fn bad_flag_combination() {
        // See https://github.com/zkcrypto/pairing/tree/fa8103764a07bd273927447d434de18aace252d3/src/bls12_381#serialization
        // - Bit 1 is compressed/uncompressed
        // - Bit 2 is infinity
        // - Bit 3 is lexicographical order for compressed point deserialization
        // Hence `0b1110` ("e" in hex) or `0b0110` ("6" in hex") are both nonsensical.

        // uncompressed, but lexicographically largest flag is set
        let non_canonical_hex = "600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err());

        // compressed, but infinity flag is set and lexicographically largest flag is
        // set
        let non_canonical_hex_2 = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";

        let non_canonical_bytes = hex::decode(non_canonical_hex_2).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
        assert!(maybe_affine_point.is_err());
    }
}
294}