// ark_bls12_381/curves/g1.rs
use ark_ec::{
    bls12,
    bls12::Bls12Config,
    hashing::curve_maps::wb::{IsogenyMap, WBConfig},
    models::CurveConfig,
    scalar_mul::glv::GLVConfig,
    short_weierstrass::{Affine, SWCurveConfig},
    AffineRepr, PrimeGroup,
};
use ark_ff::{AdditiveGroup, BigInt, MontFp, PrimeField, Zero};
use ark_serialize::{Compress, SerializationError};
use ark_std::{ops::Neg, One};

use super::g1_swu_iso;
use crate::{
    util::{
        read_g1_compressed, read_g1_uncompressed, serialize_fq, EncodingFlags, G1_SERIALIZED_SIZE,
    },
    Fq, Fr,
};
21
/// A G1 point in affine coordinates.
pub type G1Affine = bls12::G1Affine<crate::Config>;
/// A G1 point in projective coordinates.
pub type G1Projective = bls12::G1Projective<crate::Config>;

/// Curve configuration for the BLS12-381 G1 group, defined over `Fq`.
#[derive(Clone, Default, PartialEq, Eq)]
pub struct Config;
27
impl CurveConfig for Config {
    type BaseField = Fq;
    type ScalarField = Fr;

    /// COFACTOR of G1, as little-endian 64-bit limbs:
    /// 0x396c8c005555e1568c00aaab0000aaab
    /// = 76329603384216526031706109802092473003.
    const COFACTOR: &'static [u64] = &[0x8c00aaab0000aaab, 0x396c8c005555e156];

    /// COFACTOR_INV = COFACTOR^{-1} mod r
    /// = 52435875175126190458656871551744051925719901746859129887267498875565241663483.
    const COFACTOR_INV: Fr =
        MontFp!("52435875175126190458656871551744051925719901746859129887267498875565241663483");
}
40
impl SWCurveConfig for Config {
    /// Curve coefficient `a = 0`; G1 is the short-Weierstrass curve
    /// `y^2 = x^3 + 4`.
    const COEFF_A: Fq = Fq::ZERO;

    /// Curve coefficient `b = 4`.
    const COEFF_B: Fq = MontFp!("4");

    /// The standard generator of the prime-order subgroup of G1.
    const GENERATOR: G1Affine = G1Affine::new_unchecked(G1_GENERATOR_X, G1_GENERATOR_Y);

    /// Since `a = 0`, multiplication by `a` is always zero.
    #[inline(always)]
    fn mul_by_a(_: Self::BaseField) -> Self::BaseField {
        Self::BaseField::zero()
    }

    /// Scalar multiplication, routed through the GLV-endomorphism fast path.
    /// `scalar` is interpreted as an unsigned little-endian limb slice.
    #[inline]
    fn mul_projective(p: &G1Projective, scalar: &[u64]) -> G1Projective {
        let s = Self::ScalarField::from_sign_and_limbs(true, scalar);
        GLVConfig::glv_mul_projective(*p, s)
    }

    /// Endomorphism-based subgroup membership test: accepts `p` iff
    /// `-x^2 * p == sigma(p)`, where `sigma` is the curve endomorphism
    /// `(x, y) -> (BETA * x, y)` and `x` is the BLS parameter
    /// `crate::Config::X`.
    #[inline]
    fn is_in_correct_subgroup_assuming_on_curve(p: &G1Affine) -> bool {
        let x_times_p = p.mul_bigint(crate::Config::X);
        // Reject non-identity fixed points of multiplication by `x`:
        // such points would satisfy the equality below without lying in
        // the prime-order subgroup.
        if x_times_p.eq(p) && !p.infinity {
            return false;
        }

        let minus_x_squared_times_p = x_times_p.mul_bigint(crate::Config::X).neg();
        let endomorphism_p = endomorphism(p);
        minus_x_squared_times_p.eq(&endomorphism_p)
    }

    /// Maps an arbitrary curve point into the prime-order subgroup by
    /// multiplying by the effective cofactor `h_eff = 1 - x`.
    #[inline]
    fn clear_cofactor(p: &G1Affine) -> G1Affine {
        let h_eff = one_minus_x().into_bigint();
        // NOTE(review): `&p` is a needless extra borrow (`p` is already a
        // reference); it compiles via deref coercion but plain `p` suffices.
        Config::mul_affine(&p, h_eff.as_ref()).into()
    }

    /// Deserializes a G1 point from the 48-byte compressed / 96-byte
    /// uncompressed BLS12-381 wire format.
    ///
    /// # Errors
    /// Returns `SerializationError::InvalidData` when `validate` is `Yes`
    /// and the decoded point fails the subgroup check, or propagates any
    /// error from the `read_g1_*` helpers (which are presumed to perform
    /// the on-curve/flag validation — confirm against `crate::util`).
    fn deserialize_with_mode<R: ark_serialize::Read>(
        mut reader: R,
        compress: ark_serialize::Compress,
        validate: ark_serialize::Validate,
    ) -> Result<Affine<Self>, ark_serialize::SerializationError> {
        let p = if compress == ark_serialize::Compress::Yes {
            read_g1_compressed(&mut reader)?
        } else {
            read_g1_uncompressed(&mut reader)?
        };

        if validate == ark_serialize::Validate::Yes && !p.is_in_correct_subgroup_assuming_on_curve()
        {
            return Err(SerializationError::InvalidData);
        }
        Ok(p)
    }

    /// Serializes a G1 point in the BLS12-381 wire format: 48 bytes
    /// (x-coordinate plus flag bits) compressed, 96 bytes uncompressed.
    fn serialize_with_mode<W: ark_serialize::Write>(
        item: &Affine<Self>,
        mut writer: W,
        compress: ark_serialize::Compress,
    ) -> Result<(), SerializationError> {
        let encoding = EncodingFlags {
            is_compressed: compress == ark_serialize::Compress::Yes,
            is_infinity: item.is_zero(),
            // `y > -y` selects the lexicographically larger of the two
            // candidate y-coordinates.  (The field-name misspelling is part
            // of the `EncodingFlags` API and cannot be changed here.)
            is_lexographically_largest: item.y > -item.y,
        };
        let mut p = *item;
        if encoding.is_infinity {
            // Canonicalize: the identity is always encoded with zeroed
            // coordinates, regardless of the internal representation.
            p = G1Affine::zero();
        }
        let x_bytes = serialize_fq(p.x);
        if encoding.is_compressed {
            let mut bytes: [u8; G1_SERIALIZED_SIZE] = x_bytes;

            encoding.encode_flags(&mut bytes);
            writer.write_all(&bytes)?;
        } else {
            let mut bytes = [0u8; 2 * G1_SERIALIZED_SIZE];
            bytes[0..G1_SERIALIZED_SIZE].copy_from_slice(&x_bytes[..]);
            bytes[G1_SERIALIZED_SIZE..].copy_from_slice(&serialize_fq(p.y)[..]);

            encoding.encode_flags(&mut bytes);
            writer.write_all(&bytes)?;
        };

        Ok(())
    }

    /// 48 bytes compressed, 96 bytes uncompressed.
    fn serialized_size(compress: Compress) -> usize {
        if compress == Compress::Yes {
            G1_SERIALIZED_SIZE
        } else {
            G1_SERIALIZED_SIZE * 2
        }
    }
}
151
152impl GLVConfig for Config {
153 const ENDO_COEFFS: &'static[Self::BaseField] = &[
154 MontFp!("793479390729215512621379701633421447060886740281060493010456487427281649075476305620758731620350")
155 ];
156
157 const LAMBDA: Self::ScalarField =
158 MontFp!("52435875175126190479447740508185965837461563690374988244538805122978187051009");
159
160 const SCALAR_DECOMP_COEFFS: [(bool, <Self::ScalarField as PrimeField>::BigInt); 4] = [
161 (true, BigInt!("228988810152649578064853576960394133504")),
162 (true, BigInt!("1")),
163 (false, BigInt!("1")),
164 (true, BigInt!("228988810152649578064853576960394133503")),
165 ];
166
167 fn endomorphism(p: &G1Projective) -> G1Projective {
168 let mut res = (*p).clone();
169 res.x *= Self::ENDO_COEFFS[0];
170 res
171 }
172
173 fn endomorphism_affine(p: &Affine<Self>) -> Affine<Self> {
174 let mut res = (*p).clone();
175 res.x *= Self::ENDO_COEFFS[0];
176 res
177 }
178}
179
180fn one_minus_x() -> Fr {
181 const X: Fr = Fr::from_sign_and_limbs(!crate::Config::X_IS_NEGATIVE, crate::Config::X);
182 Fr::one() - X
183}
184
/// Wahby–Boneh hash-to-curve configuration: points are first produced on
/// the SWU-friendly isogenous curve, then mapped to G1 through the
/// precomputed isogeny defined in `g1_swu_iso`.
impl WBConfig for Config {
    type IsogenousCurve = g1_swu_iso::SwuIsoConfig;

    const ISOGENY_MAP: IsogenyMap<'static, Self::IsogenousCurve, Self> =
        g1_swu_iso::ISOGENY_MAP_TO_G1;
}
192
/// x-coordinate of the standard G1 generator.
pub const G1_GENERATOR_X: Fq = MontFp!("3685416753713387016781088315183077757961620795782546409894578378688607592378376318836054947676345821548104185464507");

/// y-coordinate of the standard G1 generator.
pub const G1_GENERATOR_Y: Fq = MontFp!("1339506544944476473020471379941921221584933875938349620426543736416511423956333506472724655353366534992391756441569");

/// Endomorphism coefficient (a non-trivial cube root of unity in `Fq`);
/// scaling a point's `x` by BETA implements the GLV endomorphism.  Same
/// value as `GLVConfig::ENDO_COEFFS[0]`.
pub const BETA: Fq = MontFp!("793479390729215512621379701633421447060886740281060493010456487427281649075476305620758731620350");
203
204pub fn endomorphism(p: &Affine<Config>) -> Affine<Config> {
205 let mut res = (*p).clone();
209 res.x *= BETA;
210 res
211}
212
#[cfg(test)]
mod test {

    use super::*;
    use crate::g1;
    use ark_serialize::CanonicalDeserialize;
    use ark_std::{rand::Rng, UniformRand};

    /// Rejection-samples a random point satisfying the curve equation.
    /// The result is on the curve but NOT necessarily in the prime-order
    /// subgroup (hence "unchecked").
    fn sample_unchecked() -> Affine<g1::Config> {
        let mut rng = ark_std::test_rng();
        loop {
            let x = Fq::rand(&mut rng);
            let greatest = rng.gen();

            if let Some(p) = Affine::get_point_from_x_unchecked(x, greatest) {
                return p;
            }
        }
    }

    /// Cofactor clearing must land every sampled curve point in the
    /// prime-order subgroup.
    #[test]
    fn test_cofactor_clearing() {
        const SAMPLES: usize = 100;
        for _ in 0..SAMPLES {
            let p: Affine<g1::Config> = sample_unchecked();
            let p = p.clear_cofactor();
            assert!(p.is_on_curve());
            assert!(p.is_in_correct_subgroup_assuming_on_curve());
        }
    }

    /// An identity-point encoding with non-zero payload bits is
    /// non-canonical and must be rejected, in both the compressed and the
    /// uncompressed format.
    #[test]
    fn non_canonical_identity_point() {
        // Compressed infinity (0xc0 prefix) but with a stray non-zero bit.
        let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err());

        // Uncompressed infinity with a non-zero trailing byte.
        let non_canonical_hex_uncompressed = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001";
        let non_canonical_bytes = hex::decode(non_canonical_hex_uncompressed).unwrap();
        assert_eq!(non_canonical_bytes.len(), 96);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_uncompressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err())
    }

    /// Invalid flag-bit combinations in the first byte must be rejected.
    #[test]
    fn bad_flag_combination() {
        // 0x60: infinity and sort (largest-y) bits set while the
        // compression bit is clear — an invalid combination.
        let non_canonical_hex = "600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

        assert!(maybe_affine_point.is_err());

        // 0xe0: compression, infinity, and sort bits all set — the sort
        // bit is meaningless for the infinity encoding.
        let non_canonical_hex_2 = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";

        let non_canonical_bytes = hex::decode(non_canonical_hex_2).unwrap();
        assert_eq!(non_canonical_bytes.len(), 48);

        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
        assert!(maybe_affine_point.is_err());
    }
}