Barretenberg
The ZK-SNARK library at the core of Aztec
instr_fetching.test.cpp
#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <cstdint>
#include <memory>
#include <vector>

namespace bb::avm2::constraining {
namespace {

using tracegen::BytecodeTraceBuilder;
using tracegen::PrecomputedTraceBuilder;
using tracegen::RangeCheckTraceBuilder;
using tracegen::TestTraceContainer;

using C = Column;

using instr_fetching = instr_fetching<FF>;

using simulation::BytecodeDecompositionEvent;
using simulation::InstrDeserializationEventError;
using simulation::Instruction;
using simulation::InstructionFetchingEvent;
using simulation::Operand;
using simulation::RangeCheckEvent;

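// Sanity check: the instruction fetching relation must also hold on a completely empty trace.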
TEST(InstrFetchingConstrainingTest, EmptyRow)
{
    check_relation<instr_fetching>(testing::empty_trace());
}

// Basic positive test with a hardcoded bytecode for ADD_8.
TEST(InstrFetchingConstrainingTest, Add8WithTraceGen)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;
    PrecomputedTraceBuilder precomputed_builder;

    Instruction add_8_instruction = {
        .opcode = WireOpCode::ADD_8,
        .indirect = 3,
        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
    };

    std::vector<uint8_t> bytecode = add_8_instruction.serialize();

    builder.process_instruction_fetching({ { .bytecode_id = 1,
                                             .pc = 0,
                                             .instruction = add_8_instruction,
                                             .bytecode = std::make_shared<std::vector<uint8_t>>(std::move(bytecode)) } },
                                          trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    EXPECT_EQ(trace.get_num_rows(), 2);
    check_relation<instr_fetching>(trace);
}

// Basic positive test with a hardcoded bytecode for ECADD.
// Covers the largest number of operands.
TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;
    PrecomputedTraceBuilder precomputed_builder;

    Instruction ecadd_instruction = {
        .opcode = WireOpCode::ECADD,
        .indirect = 0x1f1f,
        .operands = { Operand::from<uint16_t>(0x1279),
                      Operand::from<uint16_t>(0x127a),
                      Operand::from<uint16_t>(0x127b),
                      Operand::from<uint16_t>(0x127c),
                      Operand::from<uint16_t>(0x127d),
                      Operand::from<uint16_t>(0x127e),
                      Operand::from<uint16_t>(0x127f) },
    };

    std::vector<uint8_t> bytecode = ecadd_instruction.serialize();
    builder.process_instruction_fetching({ { .bytecode_id = 1,
                                             .pc = 0,
                                             .instruction = ecadd_instruction,
                                             .bytecode = std::make_shared<std::vector<uint8_t>>(std::move(bytecode)) } },
                                          trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    EXPECT_EQ(trace.get_num_rows(), 2);
    check_relation<instr_fetching>(trace);
}

// Helper routine generating a vector of instruction fetching events, one per
// wire opcode.
std::vector<InstructionFetchingEvent> gen_instr_events_each_opcode()
{
    std::vector<uint8_t> bytecode;
    std::vector<Instruction> instructions;
    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
    instructions.reserve(num_opcodes);
    std::vector<uint32_t> pc_positions(num_opcodes);

    for (size_t i = 0; i < num_opcodes; i++) {
        pc_positions.at(i) = static_cast<uint32_t>(bytecode.size());
        const auto instr = testing::random_instruction(static_cast<WireOpCode>(i));
        instructions.emplace_back(instr);
        const auto instruction_bytes = instr.serialize();
        bytecode.insert(bytecode.end(),
                        std::make_move_iterator(instruction_bytes.begin()),
                        std::make_move_iterator(instruction_bytes.end()));
    }

    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
    // Always use *bytecode_ptr from now on instead of bytecode, as the latter was moved from.

    std::vector<InstructionFetchingEvent> instr_events;
    instr_events.reserve(num_opcodes);
    for (size_t i = 0; i < num_opcodes; i++) {
        instr_events.emplace_back(InstructionFetchingEvent{
            .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instructions.at(i), .bytecode = bytecode_ptr });
    }
    return instr_events;
}

// Positive test for each opcode. We assume that instruction decoding works correctly.
// It works as long as the relations do not constrain the valid ranges of TAG and indirect.
TEST(InstrFetchingConstrainingTest, EachOpcodeWithTraceGen)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;
    PrecomputedTraceBuilder precomputed_builder;

    builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
    EXPECT_EQ(trace.get_num_rows(), num_opcodes + 1);
    check_relation<instr_fetching>(trace);
}

// Negative test on the decomposition of operands: we mutate correct operand values in the trace.
// This also covers operands which are not "involved" in the instruction.
// We perform this for a random instruction of each of the opcodes: REVERT_16, CAST_8, TORADIXBE.
TEST(InstrFetchingConstrainingTest, NegativeWrongOperand)
{
    BytecodeTraceBuilder builder;
    PrecomputedTraceBuilder precomputed_builder;

    const std::vector<WireOpCode> opcodes = { WireOpCode::REVERT_16, WireOpCode::CAST_8, WireOpCode::TORADIXBE };
    std::vector<size_t> sub_relations = {
        instr_fetching::SR_INDIRECT_BYTES_DECOMPOSITION, instr_fetching::SR_OP1_BYTES_DECOMPOSITION,
        instr_fetching::SR_OP2_BYTES_DECOMPOSITION,      instr_fetching::SR_OP3_BYTES_DECOMPOSITION,
        instr_fetching::SR_OP4_BYTES_DECOMPOSITION,      instr_fetching::SR_OP5_BYTES_DECOMPOSITION,
        instr_fetching::SR_OP6_BYTES_DECOMPOSITION,      instr_fetching::SR_OP7_BYTES_DECOMPOSITION,
    };

    constexpr std::array<C, 8> operand_cols = {
        C::instr_fetching_indirect, C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3,
        C::instr_fetching_op4,      C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
    };

    for (const auto& opcode : opcodes) {
        TestTraceContainer trace;
        const auto instr = testing::random_instruction(opcode);
        builder.process_instruction_fetching(
            { { .bytecode_id = 1,
                .pc = 0,
                .instruction = instr,
                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
            trace);
        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

        check_relation<instr_fetching>(trace);

        EXPECT_EQ(trace.get_num_rows(), 2);

        for (size_t i = 0; i < operand_cols.size(); i++) {
            auto mutated_trace = trace;
            const FF mutated_operand = trace.get(operand_cols.at(i), 0) + 1; // Mutate to value + 1
            mutated_trace.set(operand_cols.at(i), 0, mutated_operand);
            EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(mutated_trace, sub_relations.at(i)),
                                      instr_fetching::get_subrelation_label(sub_relations.at(i)));
        }
    }
}

// Positive test for the interaction with the instruction spec table, using the same events as the test
// EachOpcodeWithTraceGen, i.e., one event/row is generated per wire opcode.
// It works as long as the relations do not constrain the valid ranges of TAG and indirect.
TEST(InstrFetchingConstrainingTest, WireInstructionSpecInteractions)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    precomputed_builder.process_wire_instruction_spec(trace);
    bytecode_builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for selector against wire_instruction_spec

    check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);
    check_relation<instr_fetching>(trace);
}

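// Helper building the range check events emitted by instruction fetching: one event per fetch,
// checking either pc - bytecode_size (PC_OUT_OF_RANGE case) or bytecode_size - pc - 1 against
// AVM_PC_SIZE_IN_BITS bits.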
std::vector<RangeCheckEvent> gen_range_check_events(const std::vector<InstructionFetchingEvent>& instr_events)
{
    std::vector<RangeCheckEvent> range_check_events;
    range_check_events.reserve(instr_events.size());

    for (const auto& instr_event : instr_events) {
        range_check_events.emplace_back(RangeCheckEvent{
            .value =
                (instr_event.error.has_value() && instr_event.error == InstrDeserializationEventError::PC_OUT_OF_RANGE)
                    ? instr_event.pc - instr_event.bytecode->size()
                    : instr_event.bytecode->size() - instr_event.pc - 1,
            .num_bits = AVM_PC_SIZE_IN_BITS,
        });
    }
    return range_check_events;
}

// Positive test for the interaction with the bytecode decomposition table.
// One event/row is generated per wire opcode (same as for test WireInstructionSpecInteractions).
TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    const auto instr_fetch_events = gen_instr_events_each_opcode();
    bytecode_builder.process_instruction_fetching(instr_fetch_events, trace);
    bytecode_builder.process_decomposition({ {
                                               .bytecode_id = instr_fetch_events.at(0).bytecode_id,
                                               .bytecode = instr_fetch_events.at(0).bytecode,
                                           } },
                                           trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    check_interaction<BytecodeTraceBuilder,
                      lookup_instr_fetching_bytes_from_bc_dec_settings,
                      lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(trace);

    // BC Decomposition trace is the longest here.
    EXPECT_EQ(trace.get_num_rows(), instr_fetch_events.at(0).bytecode->size() + 1);

    check_relation<instr_fetching>(trace);
}

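// Helper checking the instruction fetching relation together with all of its lookups, including the
// range-check-based ones (the trace therefore spans 2^16 rows).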
void check_all(const std::vector<InstructionFetchingEvent>& instr_events,
               const std::vector<RangeCheckEvent>& range_check_events,
               const std::vector<BytecodeDecompositionEvent>& decomposition_events)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;
    RangeCheckTraceBuilder range_check_builder;

    precomputed_builder.process_wire_instruction_spec(trace);
    precomputed_builder.process_memory_tag_range(trace);
    bytecode_builder.process_instruction_fetching(instr_events, trace);
    bytecode_builder.process_decomposition(decomposition_events, trace);
    range_check_builder.process(range_check_events, trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    check_interaction<BytecodeTraceBuilder,
                      lookup_instr_fetching_wire_instruction_info_settings,
                      lookup_instr_fetching_bytes_from_bc_dec_settings,
                      lookup_instr_fetching_bytecode_size_from_bc_dec_settings,
                      lookup_instr_fetching_tag_value_validation_settings,
                      lookup_instr_fetching_instr_abs_diff_positive_settings,
                      lookup_instr_fetching_pc_abs_diff_positive_settings>(trace);

    EXPECT_EQ(trace.get_num_rows(), 1 << 16); // 2^16 for range checks

    check_relation<instr_fetching>(trace);
}

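// Same as check_all but without the range check trace, so the range-check-based lookups are skipped.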
void check_without_range_check(const std::vector<InstructionFetchingEvent>& instr_events,
                               const std::vector<BytecodeDecompositionEvent>& decomposition_events)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    precomputed_builder.process_wire_instruction_spec(trace);
    precomputed_builder.process_memory_tag_range(trace);
    bytecode_builder.process_instruction_fetching(instr_events, trace);
    bytecode_builder.process_decomposition(decomposition_events, trace);
    precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

    check_interaction<BytecodeTraceBuilder,
                      lookup_instr_fetching_wire_instruction_info_settings,
                      lookup_instr_fetching_bytes_from_bc_dec_settings,
                      lookup_instr_fetching_bytecode_size_from_bc_dec_settings,
                      lookup_instr_fetching_tag_value_validation_settings>(trace);

    EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for selector against wire_instruction_spec

    check_relation<instr_fetching>(trace);
}

// Positive test with five bytecodes and bytecode_id = 0,1,2,3,4.
// Bytecode i is generated by truncating instr_fetch_events to i * 6 instructions.
// Check relations and all interactions.
TEST(InstrFetchingConstrainingTest, MultipleBytecodes)
{
    const auto instr_fetch_events = gen_instr_events_each_opcode();
    constexpr size_t num_of_bytecodes = 5;
    std::vector<InstructionFetchingEvent> instr_events;
    std::vector<BytecodeDecompositionEvent> decomposition_events;

    for (size_t i = 0; i < num_of_bytecodes; i++) {
        std::vector<uint8_t> bytecode;
        const auto num_of_instr = i * 6;

        for (size_t j = 0; j < num_of_instr; j++) {
            const auto& instr = instr_fetch_events.at(j).instruction;
            const auto instruction_bytes = instr.serialize();
            bytecode.insert(bytecode.end(),
                            std::make_move_iterator(instruction_bytes.begin()),
                            std::make_move_iterator(instruction_bytes.end()));
        }

        const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

        for (size_t j = 0; j < num_of_instr; j++) {
            auto instr_event = instr_fetch_events.at(j);
            instr_event.bytecode_id = static_cast<BytecodeId>(i);
            instr_event.bytecode = bytecode_ptr;
            instr_events.emplace_back(instr_event);
        }

        decomposition_events.emplace_back(BytecodeDecompositionEvent{
            .bytecode_id = static_cast<BytecodeId>(i),
            .bytecode = bytecode_ptr,
        });
    }

    check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
}

// Positive test with one single instruction with error INSTRUCTION_OUT_OF_RANGE.
// The bytecode consists of a single serialized instruction with pc = 0, where
// the last byte was removed. This byte corresponds to a full operand.
TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRange)
{
    Instruction add_8_instruction = {
        .opcode = WireOpCode::ADD_8,
        .indirect = 3,
        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
    };

    std::vector<uint8_t> bytecode = add_8_instruction.serialize();
    bytecode.pop_back(); // Remove last byte
    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

    const std::vector<InstructionFetchingEvent> instr_events = {
        {
            .bytecode_id = 1,
            .pc = 0,
            .bytecode = bytecode_ptr,
            .error = InstrDeserializationEventError::INSTRUCTION_OUT_OF_RANGE,
        },
    };

    const std::vector<BytecodeDecompositionEvent> decomposition_events = {
        {
            .bytecode_id = 1,
            .bytecode = bytecode_ptr,
        },
    };

    check_without_range_check(instr_events, decomposition_events);
}

// Positive test with one single instruction (SET_FF) with error INSTRUCTION_OUT_OF_RANGE.
// The bytecode consists of a single serialized instruction with pc = 0, where
// the last two bytes were removed. The truncated instruction is therefore cut
// in the middle of an operand.
TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRangeSplitOperand)
{
    Instruction set_ff_instruction = {
        .opcode = WireOpCode::SET_FF,
        .indirect = 0x01,
        .operands = { Operand::from<uint16_t>(0x1279),
                      Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::FF)),
                      Operand::from<FF>(FF::modulus_minus_two) },
    };

    std::vector<uint8_t> bytecode = set_ff_instruction.serialize();
    bytecode.resize(bytecode.size() - 2); // Remove last two bytes
    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

    const std::vector<InstructionFetchingEvent> instr_events = {
        {
            .bytecode_id = 1,
            .pc = 0,
            .bytecode = bytecode_ptr,
            .error = InstrDeserializationEventError::INSTRUCTION_OUT_OF_RANGE,
        },
    };

    const std::vector<BytecodeDecompositionEvent> decomposition_events = {
        {
            .bytecode_id = 1,
            .bytecode = bytecode_ptr,
        },
    };

    check_without_range_check(instr_events, decomposition_events);
}

// Positive test with error case PC_OUT_OF_RANGE. We pass a pc which is out of range.
TEST(InstrFetchingConstrainingTest, SingleInstructionPcOutOfRange)
{
    Instruction add_8_instruction = {
        .opcode = WireOpCode::SUB_8,
        .indirect = 3,
        .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
    };

    std::vector<uint8_t> bytecode = add_8_instruction.serialize();
    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

    const std::vector<InstructionFetchingEvent> instr_events = {
        // We need a first instruction at pc == 0 as the trace assumes this.
        {
            .bytecode_id = 1,
            .pc = 0,
            .instruction = add_8_instruction,
            .bytecode = bytecode_ptr,
        },
        {
            .bytecode_id = 1,
            .pc = static_cast<uint32_t>(bytecode_ptr->size() + 1),
            .bytecode = bytecode_ptr,
            .error = InstrDeserializationEventError::PC_OUT_OF_RANGE,
        },
    };

    const std::vector<BytecodeDecompositionEvent> decomposition_events = {
        {
            .bytecode_id = 1,
            .bytecode = bytecode_ptr,
        },
    };

    check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
}

// Positive test with error case OPCODE_OUT_OF_RANGE. We generate the bytecode of a SET_128 instruction and
// move the PC to a position corresponding to the beginning of the 128-bit immediate value of SET_128.
// The immediate value in SET_128 starts with byte 0xFF (which we know is not a valid opcode).
TEST(InstrFetchingConstrainingTest, SingleInstructionOpcodeOutOfRange)
{
    Instruction set_128_instruction = {
        .opcode = WireOpCode::SET_128,
        .indirect = 0,
        .operands = { Operand::from<uint16_t>(0x1234),
                      Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::U128)),
                      Operand::from<uint128_t>(static_cast<uint128_t>(0xFF) << 120) },
    };

    std::vector<uint8_t> bytecode = set_128_instruction.serialize();
    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

    const std::vector<InstructionFetchingEvent> instr_events = {
        {
            .bytecode_id = 1,
            .pc = 0,
            .instruction = set_128_instruction,
            .bytecode = bytecode_ptr,
        },
        {
            .bytecode_id = 1,
            .pc = 5, // We move pc to the beginning of the 128-bit immediate value.
            .bytecode = bytecode_ptr,
            .error = InstrDeserializationEventError::OPCODE_OUT_OF_RANGE,
        },
    };

    const std::vector<BytecodeDecompositionEvent> decomposition_events = {
        {
            .bytecode_id = 1,
            .bytecode = bytecode_ptr,
        },
    };

    check_without_range_check(instr_events, decomposition_events);
}

// Positive test with one single instruction (SET_16) with error TAG_OUT_OF_RANGE.
// The bytecode consists of a single serialized instruction with pc = 0.
// The operand at index 1 is wrongly set to the value 12 (not a valid memory tag).
TEST(InstrFetchingConstrainingTest, SingleInstructionTagOutOfRange)
{
    Instruction set_16_instruction = {
        .opcode = WireOpCode::SET_16,
        .indirect = 0,
        .operands = { Operand::from<uint16_t>(0x1234), Operand::from<uint8_t>(12), Operand::from<uint16_t>(0x5678) },
    };

    std::vector<uint8_t> bytecode = set_16_instruction.serialize();
    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));

    const std::vector<InstructionFetchingEvent> instr_events = {
        {
            .bytecode_id = 1,
            .pc = 0,
            .instruction = set_16_instruction,
            .bytecode = bytecode_ptr,
            .error = InstrDeserializationEventError::TAG_OUT_OF_RANGE,
        },
    };

    const std::vector<BytecodeDecompositionEvent> decomposition_events = {
        {
            .bytecode_id = 1,
            .bytecode = bytecode_ptr,
        },
    };

    check_without_range_check(instr_events, decomposition_events);
}

// Negative interaction test with some values not matching the instruction spec table.
TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions)
{
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    // Some arbitrarily chosen opcodes. We limit to one as this unit test is costly.
    // The test works if the following vector is extended to other opcodes though.

    for (const auto& opcode : opcodes) {
        TestTraceContainer trace;
        const auto instr = testing::random_instruction(opcode);
        bytecode_builder.process_instruction_fetching(
            { { .bytecode_id = 1,
                .pc = 0,
                .instruction = instr,
                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
            trace);
        precomputed_builder.process_wire_instruction_spec(trace);
        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);

        ASSERT_EQ(trace.get(C::lookup_instr_fetching_wire_instruction_info_counts, static_cast<uint32_t>(opcode)), 1);

        constexpr std::array<C, 21> mutated_cols = {
            C::instr_fetching_exec_opcode, C::instr_fetching_instr_size, C::instr_fetching_sel_has_tag,
            C::instr_fetching_sel_tag_is_op2, C::instr_fetching_sel_op_dc_0, C::instr_fetching_sel_op_dc_1,
            C::instr_fetching_sel_op_dc_2, C::instr_fetching_sel_op_dc_3, C::instr_fetching_sel_op_dc_4,
            C::instr_fetching_sel_op_dc_5, C::instr_fetching_sel_op_dc_6, C::instr_fetching_sel_op_dc_7,
            C::instr_fetching_sel_op_dc_8, C::instr_fetching_sel_op_dc_9, C::instr_fetching_sel_op_dc_10,
            C::instr_fetching_sel_op_dc_11, C::instr_fetching_sel_op_dc_12, C::instr_fetching_sel_op_dc_13,
            C::instr_fetching_sel_op_dc_14, C::instr_fetching_sel_op_dc_15, C::instr_fetching_sel_op_dc_16,
        };

        // Mutate each column involved in the lookup tuple.
        for (const auto& col : mutated_cols) {
            auto mutated_trace = trace;
            const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
            mutated_trace.set(col, 1, mutated_value);

            EXPECT_THROW_WITH_MESSAGE(
                (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(
                    mutated_trace)),
                "Failed.*LOOKUP_INSTR_FETCHING_WIRE_INSTRUCTION_INFO.*Could not find tuple in destination.");
        }
    }
}

// Negative interaction test with some values not matching the bytecode decomposition table.
TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;

    // Some arbitrarily chosen opcodes. We limit to one as this unit test is costly.
    // The test works if the following vector is extended to other opcodes though.

    for (const auto& opcode : opcodes) {
        TestTraceContainer trace;
        const auto instr = testing::random_instruction(opcode);
        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.serialize());
        bytecode_builder.process_instruction_fetching({ {
                                                          .bytecode_id = 1,
                                                          .pc = 0,
                                                          .instruction = instr,
                                                          .bytecode = bytecode_ptr,
                                                      } },
                                                      trace);
        bytecode_builder.process_decomposition({ {
                                                   .bytecode_id = 1,
                                                   .bytecode = bytecode_ptr,
                                               } },
                                               trace);

        auto valid_trace = trace; // Keep original trace before lookup processing
        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(valid_trace);

        constexpr std::array<C, 39> mutated_cols = {
            C::instr_fetching_pc, C::instr_fetching_bytecode_id, C::instr_fetching_bd0, C::instr_fetching_bd1,
            C::instr_fetching_bd2, C::instr_fetching_bd3, C::instr_fetching_bd4, C::instr_fetching_bd5,
            C::instr_fetching_bd6, C::instr_fetching_bd7, C::instr_fetching_bd8, C::instr_fetching_bd9,
            C::instr_fetching_bd10, C::instr_fetching_bd11, C::instr_fetching_bd12, C::instr_fetching_bd13,
            C::instr_fetching_bd14, C::instr_fetching_bd15, C::instr_fetching_bd16, C::instr_fetching_bd17,
            C::instr_fetching_bd18, C::instr_fetching_bd19, C::instr_fetching_bd20, C::instr_fetching_bd21,
            C::instr_fetching_bd22, C::instr_fetching_bd23, C::instr_fetching_bd24, C::instr_fetching_bd25,
            C::instr_fetching_bd26, C::instr_fetching_bd27, C::instr_fetching_bd28, C::instr_fetching_bd29,
            C::instr_fetching_bd30, C::instr_fetching_bd31, C::instr_fetching_bd32, C::instr_fetching_bd33,
            C::instr_fetching_bd34, C::instr_fetching_bd35, C::instr_fetching_bd36,
        };

        // Mutate each column involved in the lookup tuple.
        for (const auto& col : mutated_cols) {
            auto mutated_trace = trace;
            const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
            mutated_trace.set(col, 1, mutated_value);

            EXPECT_THROW_WITH_MESSAGE(
                (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(
                    mutated_trace)),
                "Failed.*BYTES_FROM_BC_DEC. Could not find tuple in destination.");
        }
    }
}

// Negative interaction test for #[BYTECODE_SIZE_FROM_BC_DEC] where bytecode_size has the wrong value.
// We set pc different from zero.
TEST(InstrFetchingConstrainingTest, NegativeWrongBytecodeSizeBcDecompositionInteractions)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    const uint32_t pc = 15;
    std::vector<uint8_t> bytecode(pc, 0x23);

    // Some arbitrarily chosen opcodes. We limit to one as this unit test is costly.
    // The test works if the following vector is extended to other opcodes though.

    for (const auto& opcode : opcodes) {
        TestTraceContainer trace;

        const auto instr = testing::random_instruction(opcode);
        const auto instr_bytecode = instr.serialize();
        bytecode.insert(bytecode.end(),
                        std::make_move_iterator(instr_bytecode.begin()),
                        std::make_move_iterator(instr_bytecode.end()));
        const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);

        bytecode_builder.process_instruction_fetching({ {
                                                          .bytecode_id = 1,
                                                          .pc = pc,
                                                          .instruction = instr,
                                                          .bytecode = bytecode_ptr,
                                                      } },
                                                      trace);
        bytecode_builder.process_decomposition({ {
                                                   .bytecode_id = 1,
                                                   .bytecode = bytecode_ptr,
                                               } },
                                               trace);
        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

        auto valid_trace = trace; // Keep original trace before lookup processing
        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(valid_trace);

        auto mutated_trace = trace;
        const FF mutated_value = trace.get(C::instr_fetching_bytecode_size, 1) + 1; // Mutate to value + 1
        mutated_trace.set(C::instr_fetching_bytecode_size, 1, mutated_value);

        EXPECT_THROW_WITH_MESSAGE(
            (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(
                mutated_trace)),
            "Failed.*BYTECODE_SIZE_FROM_BC_DEC. Could not find tuple in destination.");
    }
}

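// Negative interaction test with the tag value validation table: toggling tag_out_of_range
// on a valid row makes the lookup fail.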
TEST(InstrFetchingConstrainingTest, NegativeWrongTagValidationInteractions)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder bytecode_builder;
    PrecomputedTraceBuilder precomputed_builder;

    // Some chosen opcode with a tag. We limit to one as this unit test is costly.
    // The test works if the following vector is extended to other opcodes though.

    for (const auto& opcode : opcodes) {
        TestTraceContainer trace;
        const auto instr = testing::random_instruction(opcode);
        bytecode_builder.process_instruction_fetching(
            { { .bytecode_id = 1,
                .pc = 0,
                .instruction = instr,
                .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
            trace);
        precomputed_builder.process_memory_tag_range(trace);
        precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.

        check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(trace);

        auto valid_trace = trace; // Keep original trace before lookup processing

        // Mutate tag out-of-range error
        auto mutated_trace = trace;
        ASSERT_EQ(trace.get(C::instr_fetching_tag_out_of_range, 1), 0);
        mutated_trace.set(C::instr_fetching_tag_out_of_range, 1, 1); // Mutate by toggling the error.

        EXPECT_THROW_WITH_MESSAGE(
            (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(
                mutated_trace)),
            "Failed.*LOOKUP_INSTR_FETCHING_TAG_VALUE_VALIDATION.*Could not find tuple in destination.");
    }
}

// Negative test on not toggling instr_out_of_range when instr_size > bytes_to_read
TEST(InstrFetchingConstrainingTest, NegativeNotTogglingInstrOutOfRange)
{
    TestTraceContainer trace({
        { { C::precomputed_first_row, 1 } },
        {
            { C::instr_fetching_bytes_to_read, 11 },
            { C::instr_fetching_instr_abs_diff, 0 },
            { C::instr_fetching_instr_out_of_range, 1 }, // Will be mutated to zero
            { C::instr_fetching_instr_size, 12 },
            { C::instr_fetching_sel, 1 },
        },
    });

    check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);

    trace.set(C::instr_fetching_instr_out_of_range, 1, 0); // Mutate to wrong value

    EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE),
                              "INSTR_OUT_OF_RANGE_TOGGLE");
}

// Negative test on wrongly toggling instr_out_of_range when instr_size <= bytes_to_read
TEST(InstrFetchingConstrainingTest, NegativeTogglingInstrInRange)
{
    TestTraceContainer trace({
        { { C::precomputed_first_row, 1 } },
        {
            { C::instr_fetching_bytes_to_read, 12 },
            { C::instr_fetching_instr_abs_diff, 0 },
            { C::instr_fetching_instr_out_of_range, 0 }, // Will be mutated to 1
            { C::instr_fetching_instr_size, 12 },
            { C::instr_fetching_sel, 1 },
        },
    });

    check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);

    trace.set(C::instr_fetching_instr_out_of_range, 1, 1); // Mutate to wrong value

    EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE),
                              "INSTR_OUT_OF_RANGE_TOGGLE");
}

// Negative test on not toggling pc_out_of_range when pc >= bytecode_size
TEST(InstrFetchingConstrainingTest, NegativeNotTogglingPcOutOfRange)
{
    TestTraceContainer trace({
        { { C::precomputed_first_row, 1 } },
        {
            { C::instr_fetching_bytecode_size, 12 },
            { C::instr_fetching_pc, 12 },
            { C::instr_fetching_pc_abs_diff, 0 },
            { C::instr_fetching_pc_out_of_range, 1 }, // Will be mutated to 0
            { C::instr_fetching_sel, 1 },
        },
    });

    check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);

    trace.set(C::instr_fetching_pc_out_of_range, 1, 0); // Mutate to wrong value

    EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE),
                              "PC_OUT_OF_RANGE_TOGGLE");
}

// Negative test on wrongly toggling pc_out_of_range when pc < bytecode_size
TEST(InstrFetchingConstrainingTest, NegativeTogglingPcInRange)
{
    TestTraceContainer trace({
        { { C::precomputed_first_row, 1 } },
        {
            { C::instr_fetching_bytecode_size, 12 },
            { C::instr_fetching_pc, 11 },
            { C::instr_fetching_pc_abs_diff, 0 },
            { C::instr_fetching_pc_out_of_range, 0 }, // Will be mutated to 1
            { C::instr_fetching_sel, 1 },
        },
    });

    check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);

    trace.set(C::instr_fetching_pc_out_of_range, 1, 1); // Mutate to wrong value

    EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE),
                              "PC_OUT_OF_RANGE_TOGGLE");
}

} // namespace
} // namespace bb::avm2::constraining