    std::vector<Fr> scalars;
53 "biggroup mask_points: masking_scalar must ≤ 128 bits");
    const typename G::affine_element native_offset_generator = element::compute_table_offset_generator();
    C* builder = validate_context<C>(validate_context<C>(_points), validate_context<C>(_scalars));
    const element offset_generator_element = element::from_witness(builder, native_offset_generator);
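    // A fresh witness for the fixed offset generator. The loop below shifts every input
    // point by a running offset that doubles each iteration, and a single extra
    // (point, scalar) pair appended after the loop cancels those shifts again, so the
    // overall multi-scalar multiplication result is unchanged.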
    Fr running_scalar = Fr(1);
    Fr last_scalar = Fr(0);
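    // Loop invariants: at the start of iteration i, `running_scalar` equals 2^i and
    // `running_point` holds the offset added to the i-th point. `last_scalar`
    // accumulates sum_i scalar_i * 2^i, i.e. the total multiple of running_point's
    // initial value that the shifted points leak into the final sum.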
    for (size_t i = 0; i < _points.size(); i++) {
        scalars.push_back(_scalars[i]);
        points.push_back(_points[i] + running_point);
        last_scalar += _scalars[i] * running_scalar;
        running_scalar += running_scalar;
        running_point = running_point.dbl();
    }
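    // After the loop: points[i] = P_i + 2^i * R, running_point = 2^n * R and
    // last_scalar = sum_i scalar_i * 2^i, where R is running_point's initial value
    // and n is the number of input points.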
    const uint32_t n = static_cast<uint32_t>(_points.size());
    const Fr two_power_n = Fr(2).pow(n);
    const Fr two_power_n_inverse = two_power_n.invert();
    last_scalar *= two_power_n_inverse;
    scalars.push_back(-last_scalar);
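    // The appended scalar is -(sum_i scalar_i * 2^i) * 2^{-n}. Paired with the point
    // 2^n * R pushed below, the extra term contributes
    //   (-(sum_i scalar_i * 2^i) * 2^{-n}) * (2^n * R) = -(sum_i scalar_i * 2^i) * R,
    // which exactly cancels the offsets folded into the shifted points, leaving
    // sum_i scalar_i * P_i unchanged.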
    if constexpr (Fr::is_composite) {
        // For bigfield (composite) scalar types, reduce the negated correction scalar
        // back to canonical form before it is consumed downstream.
        scalars.back().self_reduce();
    }
    points.push_back(running_point);
    return { points, scalars };
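// Illustrative usage sketch (assumed, not shown in this excerpt): the masked outputs
// would typically be fed straight into a batch multiplication, along the lines of
//   auto [masked_points, masked_scalars] = element::mask_points(points, scalars, masking_scalar);
//   element result = element::batch_mul(masked_points, masked_scalars);
// The exact batch_mul overload and options used at real call sites may differ.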
    C* builder = validate_context<C>(validate_context<C>(_points), validate_context<C>(_scalars));
    std::vector<element> points;
    std::vector<Fr> scalars;
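    // Walk the inputs and drop or neutralise terms involving the point at infinity:
    // statically-known trivial terms are removed outright, while terms that are only
    // known to be at infinity at proving time are rewritten so that the downstream
    // batch multiplication never has to process an infinity point directly.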
    for (auto [_point, _scalar] : zip_view(_points, _scalars)) {
        bool_ct is_point_at_infinity = _point.is_point_at_infinity();
        if (is_point_at_infinity.get_value() && static_cast<bool>(is_point_at_infinity.is_constant())) {
            // The point is a circuit constant equal to the point at infinity: the term
            // contributes nothing, so skip it entirely.
            continue;
        }
        if (_scalar.get_value() == 0 && _scalar.is_constant()) {
            // A scalar that is a circuit constant equal to zero also contributes nothing.
            continue;
        }
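        // The remaining terms may still be at infinity, but only as a witness-dependent
        // condition. Force the scalar to zero in that case so the pair contributes
        // nothing, regardless of the placeholder coordinates the point ends up with.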
        Fr scalar;
        if constexpr (!Fr::is_composite) {
            scalar = Fr::conditional_assign_internal(is_point_at_infinity, 0, _scalar);
        } else {
            scalar = Fr::conditional_assign(is_point_at_infinity, 0, _scalar);
        }
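        // `scalar` is now zero whenever the original point was at infinity, so the
        // (point, scalar) pair pushed below adds nothing to the multi-scalar multiplication.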
        points.push_back(point);
        scalars.push_back(scalar);
    }
    return { points, scalars };