1use super::{StackSectionAnalysis, block_analysis::AbsValue};
15use crate::bytecode::{AnalysisConfig, Bytecode, InstFlags};
16use bitvec::vec::BitVec;
17use revm_bytecode::opcode as op;
18
/// Decoded immediate operand of a stack-manipulation opcode.
///
/// Produced by `crate::decode_single` / `crate::decode_pair` in
/// `dead_store_elim`; a failed decode yields `None` instead of a variant.
#[derive(Clone, Copy)]
enum DecodedImm {
    /// Single depth immediate, used by `DUPN` and `SWAPN`.
    Single(u8),
    /// Depth pair immediate, used by `EXCHANGE`.
    Pair(u8, u8),
}
27
impl Bytecode<'_> {
    /// Dead store elimination for the operand stack.
    ///
    /// Runs a backward per-block liveness analysis over stack slots and marks
    /// instructions whose outputs are never observed as [`InstFlags::NOOP`].
    /// Only side-effect-free opcodes (see [`can_skip_when_dead`]) are
    /// eligible.
    #[instrument(name = "dse", level = "debug", skip_all)]
    pub(crate) fn dead_store_elim(&mut self) {
        let mut eliminated = 0u32;
        // When stack inspection is requested, every value must remain
        // observable, so block exits are seeded fully live below.
        let inspect_stack = self.config.contains(AnalysisConfig::INSPECT_STACK);

        // Scratch buffers reused across blocks:
        // `heights[i]` = stack height just before the i-th instruction;
        // `live[s]`    = whether stack slot `s` is live at the current point.
        let mut heights = Vec::new();
        let mut live = BitVec::new();

        for bid in self.cfg.blocks.indices() {
            let block = &self.cfg.blocks[bid];
            let mut stack = StackSectionAnalysis::default();
            // First pass (forward): record the relative stack height before
            // each instruction (0 at block entry).
            heights.clear();
            heights.push(0); for inst in block.insts() {
                let (inp, out) = self.insts[inst].stack_io();
                stack.process(inp, out);
                heights.push(stack.diff());
            }

            let section = stack.section();
            // Shift heights so that slot 0 is the deepest caller-provided
            // slot this block touches (`section.inputs` slots come from the
            // caller's stack), making all heights non-negative indices.
            let entry_height = section.inputs as i32;
            for h in &mut heights {
                *h += entry_height;
            }

            let exit_height = *heights.last().unwrap();
            let max_height = entry_height + section.max_growth as i32;

            // A suspending instruction can observe the whole stack (e.g. the
            // const_live_across_suspend test), so divergence alone is not
            // enough to treat the exit stack as dead.
            let has_suspend = block.insts().any(|i| self.insts[i].may_suspend());
            let term_diverges =
                !inspect_stack && !has_suspend && self.insts[block.terminator()].is_diverging();

            // Seed liveness at the block exit: slots up to `exit_height` are
            // live unless the terminator diverges; pad with dead slots up to
            // the maximum height the block can ever reach.
            live.clear();
            live.resize(exit_height.max(0) as usize, !term_diverges);
            live.resize(max_height.max(0) as usize, false);

            if live.is_empty() {
                continue;
            }

            // Second pass (backward): noop instructions whose outputs are all
            // dead, otherwise transfer liveness through them.
            for (idx, inst) in block.insts().enumerate().rev() {
                let data = &self.insts[inst];
                if data.flags.contains(InstFlags::NOOP)
                    || data.flags.contains(InstFlags::DISABLED)
                    || data.flags.contains(InstFlags::UNKNOWN)
                {
                    continue;
                }

                let opcode = data.opcode;
                let (inp, out) = data.stack_io();
                let h_before = heights[idx] as usize;

                // DUPN/SWAPN/EXCHANGE carry their depth(s) in an immediate
                // byte; an undecodable immediate yields `None`, which
                // disables both elimination and the precise transfer.
                let imm = match opcode {
                    op::DUPN | op::SWAPN => {
                        crate::decode_single(data.imm_byte()).map(DecodedImm::Single)
                    }
                    op::EXCHANGE => {
                        crate::decode_pair(data.imm_byte()).map(|(n, m)| DecodedImm::Pair(n, m))
                    }
                    _ => None,
                };

                if can_skip_when_dead(opcode)
                    && !data.is_branching()
                    && !data.is_diverging()
                    && all_outputs_dead(&live, opcode, h_before, inp as usize, out as usize, imm)
                {
                    self.insts[inst].flags |= InstFlags::NOOP;
                    eliminated += 1;
                    // A skipped instruction reads nothing, so its input
                    // liveness must NOT be transferred.
                    continue;
                }

                transfer_liveness(
                    &mut live,
                    opcode,
                    h_before,
                    inp as usize,
                    out as usize,
                    imm,
                    &self.snapshots.inputs[inst],
                    self.snapshots.outputs.get(inst).copied().flatten(),
                );

                // Everything on the stack below a suspension point remains
                // observable across the suspend, so force it live.
                if data.may_suspend() {
                    let h_after = heights[idx + 1] as usize;
                    live[..h_after].fill(true);
                }
            }
        }

        if eliminated > 0 {
            debug!(eliminated, "dead stores eliminated");
        }
    }
}
148
149fn can_skip_when_dead(opcode: u8) -> bool {
160 matches!(
161 opcode,
162 op::ADD
164 | op::MUL
165 | op::SUB
166 | op::SIGNEXTEND
167 | op::DIV
169 | op::SDIV
170 | op::MOD
171 | op::SMOD
172 | op::ADDMOD
173 | op::MULMOD
174 | op::LT
176 | op::GT
177 | op::SLT
178 | op::SGT
179 | op::EQ
180 | op::ISZERO
181 | op::AND
183 | op::OR
184 | op::XOR
185 | op::NOT
186 | op::BYTE
187 | op::SHL
188 | op::SHR
189 | op::SAR
190 | op::CLZ
191 | op::DUP1..=op::DUP16
193 | op::DUPN
194 | op::SWAP1..=op::SWAP16
195 | op::SWAPN
196 | op::EXCHANGE
197 | op::PUSH0..=op::PUSH32
199 | op::PC
200 | op::CODESIZE
201 | op::ADDRESS
203 | op::ORIGIN
204 | op::CALLER
205 | op::CALLVALUE
206 | op::CALLDATALOAD
207 | op::CALLDATASIZE
208 | op::GASPRICE
209 | op::RETURNDATASIZE
210 | op::COINBASE
211 | op::TIMESTAMP
212 | op::NUMBER
213 | op::DIFFICULTY
214 | op::GASLIMIT
215 | op::CHAINID
216 | op::BASEFEE
217 | op::BLOBBASEFEE
218 | op::BLOBHASH
219 | op::SLOTNUM
220 )
221}
222
/// Backward liveness transfer through one instruction.
///
/// On entry `live` describes slot liveness *after* the instruction; on return
/// it describes liveness *before* it. Stack shuffles (SWAP/DUP/EXCHANGE/POP)
/// get precise per-slot transfers; every other opcode falls through to the
/// conservative [`generic_transfer`]. Shuffles whose immediate failed to
/// decode or is out of range perform no transfer at all, mirroring the
/// conservative `false` arms in [`all_outputs_dead`].
#[allow(clippy::too_many_arguments)]
fn transfer_liveness(
    live: &mut BitVec,
    opcode: u8,
    h_before: usize,
    inp: usize,
    out: usize,
    imm: Option<DecodedImm>,
    input_snap: &[AbsValue],
    output: Option<AbsValue>,
) {
    match opcode {
        op::SWAP1..=op::SWAP16 => {
            let depth = (opcode - op::SWAP1 + 1) as usize;
            transfer_swap(live, h_before, depth);
        }
        op::SWAPN => {
            // Only transfer when the decoded depth stays inside the tracked
            // section; otherwise the swap would index below slot 0.
            if let Some(DecodedImm::Single(depth)) = imm
                && (depth as usize) < h_before
            {
                transfer_swap(live, h_before, depth as usize);
            }
        }
        op::DUP1..=op::DUP16 => {
            let depth = (opcode - op::DUP1 + 1) as usize;
            transfer_dup(live, h_before, depth);
        }
        op::DUPN => {
            if let Some(DecodedImm::Single(depth)) = imm
                && depth >= 1
                && (depth as usize) <= h_before
            {
                transfer_dup(live, h_before, depth as usize);
            }
        }
        op::EXCHANGE => {
            // NOTE(review): only `m` is range-checked here, so `decode_pair`
            // presumably guarantees `n <= m` — confirm against its contract.
            if let Some(DecodedImm::Pair(n, m)) = imm
                && (m as usize) < h_before
            {
                let tos = h_before - 1;
                let pos_a = tos - n as usize;
                let pos_b = tos - m as usize;
                // Exchange the liveness bits of the two touched slots.
                let (a, b) = (live[pos_a], live[pos_b]);
                live.set(pos_a, b);
                live.set(pos_b, a);
            }
        }
        op::POP => {
            // POP discards the top slot: whatever produced it is dead here.
            live.set(h_before - 1, false);
        }
        _ => generic_transfer(live, h_before, inp, out, input_snap, output),
    }
}
289
290fn generic_transfer(
298 live: &mut BitVec,
299 h_before: usize,
300 inp: usize,
301 out: usize,
302 input_snap: &[AbsValue],
303 output: Option<AbsValue>,
304) {
305 let write_base = h_before - inp;
306 for k in 0..out {
308 live.set(write_base + k, false);
309 }
310 if out > 0 && matches!(output, Some(AbsValue::Const(_))) {
313 return;
314 }
315 for k in 0..inp {
321 if input_snap.get(k).is_some_and(|v| matches!(v, AbsValue::Const(_))) {
323 continue;
324 }
325 live.set(h_before - inp + k, true);
326 }
327}
328
329fn transfer_swap(live: &mut BitVec, h_before: usize, depth: usize) {
331 let tos = h_before - 1;
332 let other = tos - depth;
333 let (a, b) = (live[tos], live[other]);
334 live.set(tos, b);
335 live.set(other, a);
336}
337
/// Returns `true` when every stack slot the instruction writes is dead,
/// i.e. eliminating the instruction cannot change any value that is read
/// later.
fn all_outputs_dead(
    live: &BitVec,
    opcode: u8,
    h_before: usize,
    inp: usize,
    out: usize,
    imm: Option<DecodedImm>,
) -> bool {
    match (opcode, imm) {
        // A swap writes both touched slots; both must be dead.
        (op::SWAP1..=op::SWAP16, _) => {
            swap_all_dead(live, h_before, (opcode - op::SWAP1 + 1) as usize)
        }
        (op::SWAPN, Some(DecodedImm::Single(d))) if (d as usize) < h_before => {
            swap_all_dead(live, h_before, d as usize)
        }
        // A dup only reads its source, so just the pushed copy (at slot
        // `h_before`) must be dead. For DUPN this holds even when the source
        // depth underflows the tracked section (the copy is still the only
        // write).
        (op::DUP1..=op::DUP16, _) => !live[h_before],
        (op::DUPN, Some(DecodedImm::Single(_))) => !live[h_before],
        (op::EXCHANGE, Some(DecodedImm::Pair(n, m))) if (m as usize) < h_before => {
            let tos = h_before - 1;
            !live[tos - n as usize] && !live[tos - m as usize]
        }
        // Undecodable or out-of-range immediates: conservatively not dead.
        // (Must come after the guarded arms above.)
        (op::DUPN | op::SWAPN | op::EXCHANGE, _) => false,
        // Generic opcode: all written slots [h_before - inp, + out) must be
        // dead; out-of-range positions count as dead-by-absence is NOT
        // assumed — they fail the check.
        _ if out > 0 => {
            let write_base = h_before - inp;
            (0..out).all(|k| {
                let pos = write_base + k;
                pos < live.len() && !live[pos]
            })
        }
        // Opcodes with no outputs write nothing eliminable.
        _ => false,
    }
}
380
381fn swap_all_dead(live: &BitVec, h_before: usize, depth: usize) -> bool {
383 let tos = h_before - 1;
384 !live[tos] && !live[tos - depth]
385}
386
387fn transfer_dup(live: &mut BitVec, h_before: usize, depth: usize) {
389 let new_tos = h_before;
390 let src = h_before - depth;
391 let new_tos_live = live[new_tos];
392 live.set(new_tos, false);
393 if new_tos_live {
394 live.set(src, true);
395 }
396}
397
// Unit tests: each case assembles a small snippet, runs the analysis
// pipeline, and asserts exactly which instructions dead-store elimination
// flagged as NOOP.
#[cfg(test)]
mod tests {
    use super::super::block_analysis::tests::{
        Inst, analyze_asm, analyze_asm_spec, analyze_code_spec,
    };
    use crate::bytecode::InstFlags;
    use revm_bytecode::opcode as op;
    use revm_primitives::hardfork::SpecId;

    // `n` PUSH0 instructions, one per line.
    fn push0s(n: usize) -> String {
        "PUSH0\n".repeat(n)
    }

    // Analyzes `suffix` preceded by `n` PUSH0s under the Amsterdam spec
    // (needed for DUPN/SWAPN/EXCHANGE).
    fn with_prefix(n: usize, suffix: &str) -> crate::bytecode::Bytecode<'static> {
        analyze_asm_spec(&format!("{}{suffix}", push0s(n)), SpecId::AMSTERDAM)
    }

    // Like `with_prefix`, but first pushes a dynamic (non-constant) value via
    // CALLDATALOAD so constant folding cannot kick in.
    fn with_dynamic_and_prefix(n: usize, suffix: &str) -> crate::bytecode::Bytecode<'static> {
        analyze_asm_spec(&format!("PUSH0 CALLDATALOAD {}{suffix}", push0s(n)), SpecId::AMSTERDAM)
    }

    #[test]
    fn push_pop() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42
            POP
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH should be skipped (dead store)"
        );
    }

    #[test]
    fn push_push_add_pop() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x03
            PUSH1 0x04
            ADD
            POP
            STOP
            ",
        );
        for (i, name) in [(0, "PUSH 3"), (1, "PUSH 4"), (2, "ADD")] {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "{name} should be skipped"
            );
        }
    }

    #[test]
    fn partial() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x01
            PUSH1 0x02
            POP
            PUSH0
            MSTORE
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH 1 should be skipped (const input)"
        );
        assert!(
            bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "PUSH 2 should be skipped"
        );
    }

    #[test]
    fn swap_preserves_liveness() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0
            CALLDATALOAD ; inst 1: dynamic A
            PUSH1 0x20   ; inst 2
            CALLDATALOAD ; inst 3: dynamic B
            SWAP1        ; inst 4: [B, A]
            POP          ; inst 5: [B]
            RET_WORD     ; inst 6..11: return B
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD A should be skipped (dead after swap+pop)"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(3)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD B should NOT be skipped (consumed by RET_WORD)"
        );
    }

    #[test]
    fn swap_dead_with_diverging_terminator() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x01
            PUSH1 0x02
            PUSH1 0x03
            SWAP2
            STOP
            ",
        );
        for i in 0..4 {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "inst at {i} should be skipped (exit stack dead)"
            );
        }
    }

    #[test]
    fn dup_const_source_killed() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42
            DUP1
            POP
            PUSH0
            MSTORE
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH should be skipped (const input)"
        );
    }

    #[test]
    fn dup_both_dead() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42
            DUP1
            POP
            POP
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH should be skipped when both DUP copy and source are dead"
        );
    }

    #[test]
    fn env_read_dead() {
        let bytecode = analyze_asm(
            "
            ADDRESS
            POP
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "ADDRESS should be skipped when dead"
        );
    }

    #[test]
    fn no_side_effects() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42
            PUSH0
            MSTORE
            STOP
            ",
        );
        assert!(!bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP));
    }

    #[test]
    fn swapn_preserves_liveness() {
        let bytecode = with_dynamic_and_prefix(17, "SWAP 17 RET_WORD");
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped (consumed via SWAPN + MSTORE)"
        );
    }

    #[test]
    fn dupn_keeps_source_live() {
        let bytecode = with_dynamic_and_prefix(16, "DUP 17 RET_WORD");
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn dupn_dead_copy() {
        let bytecode = analyze_asm("PUSH0 CALLDATALOAD DUP1 POP RET_WORD");
        assert!(
            bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "DUP1 should be skipped (copy is dead)"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn exchange_preserves_liveness() {
        let bytecode = analyze_asm_spec(
            "PUSH0 PUSH0 CALLDATALOAD PUSH0 EXCHANGE 1 2 SWAP2 RET_WORD",
            SpecId::AMSTERDAM,
        );
        assert!(
            !bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped (consumed via EXCHANGE + SWAP + MSTORE)"
        );
    }

    #[test]
    fn dup_dead() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42
            DUP1
            POP
            POP
            STOP
            ",
        );
        assert!(bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP));
        assert!(bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP));
    }

    #[test]
    fn swap_dead() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x01
            PUSH1 0x02
            SWAP1
            POP
            POP
            STOP
            ",
        );
        for (i, name) in [(0, "PUSH 1"), (1, "PUSH 2"), (2, "SWAP1")] {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "{name} should be skipped"
            );
        }
    }

    #[test]
    fn swap_dead_with_live_passthrough() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0
            CALLDATALOAD ; inst 1: dynamic A
            PUSH1 0x20   ; inst 2
            CALLDATALOAD ; inst 3: dynamic B
            PUSH1 0x40   ; inst 4
            CALLDATALOAD ; inst 5: dynamic C
            SWAP2        ; inst 6: [C, B, A]
            POP          ; inst 7: [C, B]
            SWAP1        ; inst 8: [B, C]
            POP          ; inst 9: [B]
            RET_WORD     ; inst 10..15: return B
            ",
        );
        for (i, alive) in [false, false, false, true, false, false, false, true, true, true]
            .into_iter()
            .enumerate()
        {
            assert_eq!(
                !bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                alive,
                "inst {i}"
            );
        }
    }

    #[test]
    fn returndatasize_dead() {
        let bytecode = analyze_asm(
            "
            RETURNDATASIZE
            POP
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "RETURNDATASIZE should be skipped when dead"
        );
    }

    #[test]
    fn returndatasize_live() {
        let bytecode = analyze_asm(
            "
            PUSH0
            RETURNDATASIZE
            MSTORE
            STOP
            ",
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "RETURNDATASIZE should NOT be skipped when live"
        );
    }

    #[test]
    fn blobhash_dead() {
        let bytecode = analyze_asm(
            "
            PUSH0
            BLOBHASH
            POP
            STOP
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH0 should be skipped"
        );
        assert!(
            bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "BLOBHASH should be skipped when dead"
        );
    }

    #[test]
    fn blobhash_live() {
        let bytecode = analyze_asm(
            "
            PUSH0
            BLOBHASH
            PUSH0
            MSTORE
            STOP
            ",
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "BLOBHASH should NOT be skipped when live"
        );
    }

    #[test]
    fn dupn_dead() {
        let pops = "POP\n".repeat(18);
        let bytecode = with_prefix(17, &format!("DUP 17 {pops} STOP"));
        for i in 0..17 {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "PUSH0 at {i} should be skipped"
            );
        }
        assert!(
            bytecode.inst(Inst::from_usize(17)).flags.contains(InstFlags::NOOP),
            "DUPN should be skipped"
        );
    }

    #[test]
    fn dupn_invalid_imm_not_eliminated() {
        // 0x5b is not a valid DUPN immediate encoding here, so elimination
        // must be conservatively disabled.
        let bytecode = analyze_code_spec(
            vec![op::PUSH0, op::DUPN, 0x5b, op::POP, op::STOP],
            SpecId::AMSTERDAM,
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "DUPN with invalid immediate should NOT be skipped"
        );
    }

    #[test]
    fn dupn_underflow_still_eliminated() {
        let bytecode = with_prefix(1, "DUP 17 POP POP STOP");
        assert!(
            bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "DUPN with valid immediate should be skipped when dead"
        );
    }

    #[test]
    fn swapn_dead() {
        let pops = "POP\n".repeat(18);
        let bytecode = with_prefix(18, &format!("SWAP 17 {pops} STOP"));
        for i in 0..18 {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "PUSH0 at {i} should be skipped"
            );
        }
        assert!(
            bytecode.inst(Inst::from_usize(18)).flags.contains(InstFlags::NOOP),
            "SWAPN should be skipped"
        );
    }

    #[test]
    fn exchange_dead() {
        let bytecode = with_prefix(3, "EXCHANGE 1 2 POP POP POP STOP");
        for i in 0..3 {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "PUSH0 at {i} should be skipped"
            );
        }
        assert!(
            bytecode.inst(Inst::from_usize(3)).flags.contains(InstFlags::NOOP),
            "EXCHANGE should be skipped"
        );
    }

    // The three underflow cases only check that analysis does not panic on
    // shuffles whose depth exceeds the available stack.
    #[test]
    fn dupn_underflow_no_panic() {
        let _ = with_prefix(2, "DUP 17 STOP");
    }

    #[test]
    fn swapn_underflow_no_panic() {
        let _ = with_prefix(2, "SWAP 17 STOP");
    }

    #[test]
    fn exchange_underflow_no_panic() {
        let _ = with_prefix(1, "EXCHANGE 1 2 STOP");
    }

    #[test]
    fn const_output_kills_all_inputs() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x02 ; inst 0
            PUSH1 0x03 ; inst 1
            ADD        ; inst 2: folded to 5
            PUSH0      ; inst 3
            MSTORE     ; inst 4
            STOP       ; inst 5
            ",
        );
        for dead in [0, 1, 2, 3] {
            assert!(bytecode.inst(Inst::from_usize(dead)).flags.contains(InstFlags::NOOP));
        }
        for not_dead in [4, 5] {
            assert!(!bytecode.inst(Inst::from_usize(not_dead)).flags.contains(InstFlags::NOOP));
        }
    }

    #[test]
    fn const_output_chain() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x02 ; inst 0
            PUSH1 0x03 ; inst 1
            ADD        ; inst 2: folded to 5
            PUSH1 0x04 ; inst 3
            MUL        ; inst 4: folded to 20
            PUSH0      ; inst 5
            MSTORE     ; inst 6
            STOP       ; inst 7
            ",
        );
        for (i, name) in [(0, "PUSH 2"), (1, "PUSH 3"), (2, "ADD"), (3, "PUSH 4")] {
            assert!(
                bytecode.inst(Inst::from_usize(i)).flags.contains(InstFlags::NOOP),
                "{name} should be skipped"
            );
        }
    }

    #[test]
    fn const_input_add_dynamic() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0: offset for CALLDATALOAD
            CALLDATALOAD ; inst 1: dynamic value
            PUSH1 0x01   ; inst 2: constant addend
            ADD          ; inst 3: dynamic + 1
            PUSH0        ; inst 4
            MSTORE       ; inst 5
            STOP         ; inst 6
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "PUSH 1 (addend) should be skipped (const input)"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn const_input_add_zero() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0: offset for CALLDATALOAD
            CALLDATALOAD ; inst 1: dynamic value
            PUSH0        ; inst 2: constant 0
            ADD          ; inst 3: dynamic + 0
            PUSH0        ; inst 4
            MSTORE       ; inst 5
            STOP         ; inst 6
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "PUSH0 (addend) should be skipped (const input)"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn const_input_both_sides() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0: constant 0 (deeper input to ADD)
            PUSH0        ; inst 1: offset for CALLDATALOAD
            CALLDATALOAD ; inst 2: dynamic value (TOS input to ADD)
            ADD          ; inst 3: 0 + dynamic
            PUSH0        ; inst 4
            MSTORE       ; inst 5
            STOP         ; inst 6
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH0 (deeper const input) should be skipped"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn builtin_const_input_killed() {
        let bytecode = analyze_asm(
            "
            PUSH0        ; inst 0: offset for CALLDATALOAD
            CALLDATALOAD ; inst 1: dynamic value
            PUSH1 0x02   ; inst 2: constant divisor
            DIV          ; inst 3: dynamic / 2
            PUSH0        ; inst 4
            MSTORE       ; inst 5
            STOP         ; inst 6
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(2)).flags.contains(InstFlags::NOOP),
            "PUSH 2 (divisor) should be skipped (builtin const input)"
        );
        assert!(
            !bytecode.inst(Inst::from_usize(1)).flags.contains(InstFlags::NOOP),
            "CALLDATALOAD should NOT be skipped"
        );
    }

    #[test]
    fn sload_const_input_killed() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x00 ; inst 0: storage key
            SLOAD      ; inst 1: read storage
            PUSH0      ; inst 2
            MSTORE     ; inst 3
            STOP       ; inst 4
            ",
        );
        assert!(
            bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH 0 should be skipped (const input)"
        );
    }

    #[test]
    fn const_live_across_suspend_not_nooped() {
        let bytecode = analyze_asm(
            "
            PUSH1 0xaa ; inst 0: live across CALL
            PUSH0      ; inst 1
            PUSH0      ; inst 2
            PUSH0      ; inst 3
            PUSH0      ; inst 4
            PUSH0      ; inst 5
            PUSH1 0x69 ; inst 6
            GAS        ; inst 7
            CALL       ; inst 8: suspends
            POP        ; inst 9
            PUSH0      ; inst 10
            MSTORE     ; inst 11
            PUSH1 0x20 ; inst 12
            PUSH0      ; inst 13
            RETURN     ; inst 14
            ",
        );
        assert!(
            !bytecode.inst(Inst::from_usize(0)).flags.contains(InstFlags::NOOP),
            "PUSH1 0xaa must NOT be nooped — it is live across the CALL suspension"
        );
    }
}