
revmc_codegen/bytecode/passes/block_analysis.rs

//! Abstract stack interpretation for resolving dynamic jump targets and constant propagation.
//!
//! This pass builds a basic-block CFG and runs worklist-based abstract interpretation to a
//! fixpoint, propagating abstract stack states across blocks. It resolves jump targets that the
//! block-local pass `block_analysis_local` (which interprets each block independently) cannot
//! handle — including internal function return patterns where multiple callers push different
//! return addresses.
//!
//! After the fixpoint converges, the known-constant operands at each instruction are persisted
//! as [`OperandSnapshot`]s so that later passes (e.g. code generation) can query the
//! known-constant value of stack operands.
//!
//! ## Abstract domain
//!
//! Per-value lattice (ascending order):
//! - `Const(U256Imm)` — a single known constant.
//! - `ConstSet(ConstSetIdx)` — multiple known constants (interned, sorted, deduplicated).
//! - `Top` — reachable but unknown.
//!
//! Per-block lattice:
//! - `Bottom` — block not yet reached.
//! - `Known(Vec<AbsValue>)` — reached with a known stack state (top-aligned; when predecessors have
//!   different stack heights, the shorter stack is bottom-padded with `Top`).
//!
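//! For example, when two callers of an internal function push different return
//! addresses before jumping to it (`ret_a`/`ret_b` are placeholder constants),
//! the function entry's per-block state joins them:
//!
//! ```text
//! caller A exit: [.., Const(ret_a)]
//! caller B exit: [.., Const(ret_b)]
//! joined entry : [.., ConstSet({ret_a, ret_b})]  // the return JUMP resolves to both
//! ```
//!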
//! ## Soundness
//!
//! When the fixpoint does not converge (iteration cap reached) or unresolved `Top` jumps remain,
//! a transitive predecessor analysis invalidates suspect resolutions that may be based on
//! incomplete information, ensuring that only sound jump targets are reported as resolved.

use super::StackSection;
use crate::bytecode::{Bytecode, Inst, InstFlags, Interner, U256Imm};
use bitvec::vec::BitVec;
use either::Either;
use oxc_index::{IndexVec, index_vec};
use revm_bytecode::opcode as op;
use smallvec::SmallVec;
use std::{cmp::Ordering, collections::VecDeque, ops::Range};

oxc_index::define_nonmax_u32_index_type! {
    /// Index into the interned constant-set pool.
    pub(crate) struct ConstSetIdx;
}
crate::bytecode::impl_index_display!(ConstSetIdx, "{}");

/// Per-instruction snapshot of abstract operand values.
///
/// Stored in stack order: index 0 is the deepest operand, last element is TOS.
/// Only the instruction's inputs are stored, not the entire stack.
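/// E.g. for `ADD` with abstract stack `[.., a, b]` (`b` on top), the recorded
/// snapshot is `[a, b]`.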
pub(crate) type OperandSnapshot = SmallVec<[AbsValue; 4]>;

/// Bundles input and output snapshots for recording during abstract interpretation.
#[derive(Clone, Default)]
pub(crate) struct Snapshots {
    /// Pre-instruction input operand snapshots.
    pub(crate) inputs: IndexVec<Inst, OperandSnapshot>,
    /// Post-instruction output snapshot (single value per instruction).
    pub(crate) outputs: IndexVec<Inst, Option<AbsValue>>,
}

impl Snapshots {
    /// Restores snapshots for the given instructions from a previously saved copy.
    pub(crate) fn restore_from(&mut self, insts: impl Iterator<Item = Inst>, saved: &Self) {
        for inst in insts {
            self.inputs[inst].clone_from(&saved.inputs[inst]);
            self.outputs[inst] = saved.outputs[inst];
        }
    }
}

/// Abstract value on the stack.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub(crate) enum AbsValue {
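    /// A single known constant value.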
    Const(U256Imm),
    /// Multiple known constant values (interned, sorted, deduplicated).
    ConstSet(ConstSetIdx),
    /// Top value (unknown concrete value).
    #[default]
    Top,
}

impl AbsValue {
    /// Returns the single constant if this is `Const`, or `None` otherwise.
    pub(crate) fn as_const(self) -> Option<U256Imm> {
        match self {
            Self::Const(v) => Some(v),
            _ => None,
        }
    }
}

/// Constant-set interner used by the abstract interpreter.
struct ConstSetInterner {
    interner: Interner<ConstSetIdx, Box<[U256Imm]>>,
}

impl ConstSetInterner {
    fn new() -> Self {
        Self { interner: Interner::new() }
    }

    /// Returns the constants in the given set.
    fn get(&self, idx: ConstSetIdx) -> &[U256Imm] {
        self.interner.get(idx)
    }

    /// Returns the set of known constants for an abstract value, or `None` if `Top`.
    fn abs_const_set<'a>(&'a self, val: &'a AbsValue) -> Option<&'a [U256Imm]> {
        match val {
            AbsValue::Top => None,
            AbsValue::Const(v) => Some(std::slice::from_ref(v)),
            AbsValue::ConstSet(idx) => Some(self.get(*idx)),
        }
    }

    /// Interns a sorted, deduplicated set and returns the corresponding `AbsValue`.
    fn intern_set(&mut self, set: &[U256Imm]) -> AbsValue {
        match set.len() {
            0 => unreachable!("empty const set"),
            1 => AbsValue::Const(set[0]),
            _ => AbsValue::ConstSet(self.interner.intern(set.into())),
        }
    }

    /// Lattice join: two values merge to their least upper bound.
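    ///
    /// E.g. `join(Const(1), Const(2))` yields `ConstSet({1, 2})`, and joining
    /// anything with `Top` yields `Top`.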
    fn join(&mut self, a: AbsValue, b: AbsValue) -> AbsValue {
        match (a, b) {
            (AbsValue::Top, _) | (_, AbsValue::Top) => AbsValue::Top,
            (AbsValue::Const(x), AbsValue::Const(y)) if x == y => AbsValue::Const(x),
            _ => {
                let a = self.abs_const_set(&a).unwrap();
                let b = self.abs_const_set(&b).unwrap();
                // Sorted merge with dedup.
                let mut merged = SmallVec::<[U256Imm; 8]>::new();
                let (mut i, mut j) = (0, 0);
                while i < a.len() && j < b.len() {
                    match a[i].cmp(&b[j]) {
                        Ordering::Less => {
                            merged.push(a[i]);
                            i += 1;
                        }
                        Ordering::Greater => {
                            merged.push(b[j]);
                            j += 1;
                        }
                        Ordering::Equal => {
                            merged.push(a[i]);
                            i += 1;
                            j += 1;
                        }
                    }
                }
                merged.extend_from_slice(&a[i..]);
                merged.extend_from_slice(&b[j..]);
                self.intern_set(&merged)
            }
        }
    }
}

/// Maximum abstract stack depth tracked by the analysis. Top-aligned joins across paths with
/// differing stack heights pad the shorter stack with `Top` at the bottom; on CFG cycles this
/// padding can grow without bound. Clamping discards only those bottom `Top` entries, so no
/// precision is lost.
///
/// Instructions that access deeper than this (e.g. Amsterdam DUPN/SWAPN up to depth 236)
/// treat the out-of-range slot as `Top` rather than aborting block interpretation.
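///
/// E.g. a 70-deep incoming state keeps only its top 64 entries; a later `DUP16`
/// still reads a precise slot, while a deeper `DUPN` access observes `Top`.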
const MAX_ABS_STACK_DEPTH: usize = 64;

/// Abstract state at the entry of a block.
#[derive(Clone, Debug)]
enum BlockState {
    /// Block has not been reached yet.
    Bottom,
    /// Block has been reached with a known stack state (top-aligned).
    Known(Vec<AbsValue>),
}

impl BlockState {
    /// Joins another incoming state into this one. Returns `true` if the state changed.
    ///
    /// When stack heights differ, the stacks are top-aligned and the shorter one is
    /// bottom-padded with `Top`. This handles the common Solidity dispatch pattern where
    /// the "no selector match" fallthrough leaves an extra item on the stack that the
    /// fallback ignores.
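    ///
    /// E.g. joining incoming `[y, z]` into existing `[a, b, c]` (TOS last) pads the
    /// incoming stack to `[Top, y, z]` and yields `[Top, join(b, y), join(c, z)]`.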
    fn join(&mut self, incoming: &[AbsValue], sets: &mut ConstSetInterner) -> bool {
        match self {
            Self::Bottom => {
                let start = incoming.len().saturating_sub(MAX_ABS_STACK_DEPTH);
                *self = Self::Known(incoming[start..].to_vec());
                true
            }
            Self::Known(existing) => {
                let new_len = existing.len().max(incoming.len());
                let mut changed = false;

                // Clamp to MAX_ABS_STACK_DEPTH by only joining the top portion.
                // Slots below that are beyond the reach of the classic DUP/SWAP ops,
                // and the deep EOF ops (DUPN/SWAPN/EXCHANGE) treat untracked slots as
                // `Top`, so discarding them preserves soundness.
                let join_len = new_len.min(MAX_ABS_STACK_DEPTH);

                // Resize existing to join_len: pad at bottom with Top or truncate.
                if existing.len() < join_len {
                    let pad = join_len - existing.len();
                    existing.splice(0..0, std::iter::repeat_n(AbsValue::Top, pad));
                    changed = true;
                } else if existing.len() > join_len {
                    existing.drain(..existing.len() - join_len);
                    changed = true;
                }

                // Join element-wise, top-aligned. Both stacks have their top at the end.
                // `incoming` may be longer than join_len — we only look at its top portion.
                let incoming_start = incoming.len().saturating_sub(join_len);
                let incoming_top = &incoming[incoming_start..];
                // incoming_top.len() <= join_len. If shorter, bottom positions get Top.
                let pad = join_len - incoming_top.len();
                for i in 0..join_len {
                    let inc = if i < pad { AbsValue::Top } else { incoming_top[i - pad] };
                    let joined = sets.join(existing[i], inc);
                    if joined != existing[i] {
                        existing[i] = joined;
                        changed = true;
                    }
                }

                changed
            }
        }
    }
}

oxc_index::define_nonmax_u32_index_type! {
    /// A block index in the CFG.
    pub(crate) struct Block;
}
crate::bytecode::impl_index_display!(Block, "bb{}");

/// FIFO worklist with deduplication.
struct Worklist {
    queue: VecDeque<Block>,
    in_queue: BitVec,
}

impl Worklist {
    fn new(size: usize) -> Self {
        Self { queue: VecDeque::new(), in_queue: BitVec::repeat(false, size) }
    }

    fn push(&mut self, id: Block) {
        let idx = id.index();
        if !self.in_queue[idx] {
            self.in_queue.set(idx, true);
            self.queue.push_back(id);
        }
    }

    fn pop(&mut self) -> Option<Block> {
        let id = self.queue.pop_front()?;
        self.in_queue.set(id.index(), false);
        Some(id)
    }
}

/// A basic block in the CFG.
pub(crate) struct BlockData {
    /// Instruction index range (exclusive end). All instructions in this range are live.
    pub(crate) insts: Range<Inst>,
    /// Predecessor block IDs.
    pub(crate) preds: SmallVec<[Block; 4]>,
    /// Successor block IDs.
    pub(crate) succs: SmallVec<[Block; 4]>,
}

impl BlockData {
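    /// Returns the block's terminator, i.e. its last instruction.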
    #[inline]
    pub(crate) fn terminator(&self) -> Inst {
        self.insts.end - 1
    }

    /// Returns an iterator over the instruction indices in this block.
    #[inline]
    pub(crate) fn insts(
        &self,
    ) -> impl DoubleEndedIterator<Item = Inst> + ExactSizeIterator + use<> {
        (self.insts.start.index()..self.insts.end.index()).map(Inst::from_usize)
    }
}

/// Resolved jump target after fixpoint.
#[derive(Clone, Debug)]
enum JumpTarget {
    /// Not yet observed.
    Bottom,
    /// One or more known constant target instruction indices.
    Resolved(SmallVec<[Inst; 4]>),
    /// Known constant but invalid target.
    Invalid,
    /// Unknown target.
    Top,
}

impl JumpTarget {
    /// Creates a resolved target with a single constant.
    fn single(inst: Inst) -> Self {
        Self::Resolved(SmallVec::from_elem(inst, 1))
    }

    /// Returns the single resolved target, if exactly one.
    fn as_single(&self) -> Option<Inst> {
        match self {
            Self::Resolved(targets) if targets.len() == 1 => Some(targets[0]),
            _ => None,
        }
    }
}

/// CFG for abstract interpretation.
#[derive(Default)]
pub(crate) struct Cfg {
    pub(crate) blocks: IndexVec<Block, BlockData>,
    /// Maps instruction index to block ID. `None` for dead-code instructions.
    pub(crate) inst_to_block: IndexVec<Inst, Option<Block>>,
}

impl Bytecode<'_> {
    /// Ensures `self.snapshots` is sized for the current instruction count.
    fn init_snapshots(&mut self) {
        let n = self.insts.len();
        self.snapshots.inputs.resize(n, SmallVec::new());
        self.snapshots.outputs.resize(n, None);
    }

    /// Block-local jump resolution: interpret each block independently to discover
    /// jumps resolvable from block-local computation alone.
    ///
    /// Also initializes `self.snapshots`.
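    ///
    /// E.g. a simple `PUSH2 0x0010; JUMP` pair resolves here without any cross-block
    /// state, since the jump's target constant is produced within the block itself.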
    #[instrument(name = "local_jumps", level = "debug", skip_all)]
    pub(crate) fn block_analysis_local(&mut self) {
        self.init_snapshots();

        let empty_sets = ConstSetInterner::new();
        let mut resolved = Vec::new();
        let mut stack = Vec::new();
        let trace_logs = enabled!(tracing::Level::TRACE);

        for bid in self.cfg.blocks.indices() {
            let block = &self.cfg.blocks[bid];

            // Compute required entry depth for this block using stack section analysis.
            let section =
                StackSection::from_stack_io(block.insts().map(|i| self.insts[i].stack_io()));

            // Interpret the block with `Top` as inputs.
            stack.clear();
            stack.resize(section.inputs as usize, AbsValue::Top);
            if !self.interpret_block(block.insts(), &mut stack) {
                continue;
            }

            // Check if the block's terminator is an unresolved jump.
            // We interpret every block to initialize `snapshots`, so we check this after.
            let block = &self.cfg.blocks[bid];
            let term_inst = block.terminator();
            let term = &self.insts[term_inst];
            if !term.is_jump() || term.flags.contains(InstFlags::STATIC_JUMP) {
                continue;
            }

            let Some(&operand) = self.snapshots.inputs[term_inst].last() else { continue };
            debug_assert!(!matches!(operand, AbsValue::ConstSet(_)));
            let target = self.resolve_jump_operand(operand, &empty_sets);
            let Some(target_inst) = target.as_single() else { continue };

            // Log non-adjacent resolutions (not simple PUSH+JUMP).
            if trace_logs {
                let is_adjacent = term_inst > 0 && {
                    let prev_inst = term_inst - 1;
                    let prev = &self.insts[prev_inst];
                    matches!(prev.opcode, op::PUSH0..=op::PUSH32)
                        && !prev.is_dead_code()
                        && block.insts.contains(&prev_inst)
                };
                if !is_adjacent {
                    trace!(%term_inst, %target_inst, pc = self.pc(term_inst), "resolved non-adjacent jump");
                }
            }

            resolved.push((term_inst, target));
        }

        let newly_resolved = self.commit_resolved_jumps(&resolved);
        debug!(newly_resolved, "finished");
        self.recompute_has_dynamic_jumps();
    }

    /// Runs abstract stack interpretation to resolve additional jump targets.
    ///
    /// Also computes and stores per-instruction stack snapshots for constant propagation.
    #[instrument(name = "ba", level = "debug", skip_all)]
    pub(crate) fn block_analysis(&mut self, local_snapshots: &Snapshots) {
        self.init_snapshots();
        let (resolved, count) = self.run_abstract_interp(local_snapshots);

        if count > 0 {
            let newly_resolved = self.commit_resolved_jumps(&resolved);
            debug!(newly_resolved, "resolved jumps");
        }

        self.recompute_has_dynamic_jumps();
    }

    /// Recomputes the `has_dynamic_jumps` flag based on the current instruction set.
    pub(crate) fn recompute_has_dynamic_jumps(&mut self) {
        let mut unresolved = self.insts.iter().filter(|inst| {
            inst.is_jump() && !inst.flags.contains(InstFlags::STATIC_JUMP) && !inst.is_dead_code()
        });
        self.has_dynamic_jumps = unresolved.next().is_some();
        if self.has_dynamic_jumps {
            debug!(n = 1 + unresolved.count(), "unresolved dynamic jumps remain");
        }
    }

    /// Commits resolved jump targets by setting flags and data on the corresponding instructions.
    ///
    /// Returns the number of newly resolved jumps.
    fn commit_resolved_jumps(&mut self, resolved: &[(Inst, JumpTarget)]) -> u32 {
        let has_top_jump = resolved.iter().any(|(_, t)| matches!(t, JumpTarget::Top));

        let mut newly_resolved = 0u32;
        for &(jump_inst, ref target) in resolved {
            // Skip if already resolved by block_analysis_local.
            if self.insts[jump_inst].flags.contains(InstFlags::STATIC_JUMP) {
                continue;
            }

            match *target {
                JumpTarget::Resolved(ref targets) => {
                    for &target_inst in targets {
                        debug_assert_eq!(
                            self.insts[target_inst].opcode,
                            op::JUMPDEST,
                            "block_analysis resolved to non-JUMPDEST"
                        );
                        self.insts[target_inst].set_jumpdest_reachable();
                    }
                    if targets.len() == 1 {
                        self.insts[jump_inst].flags |= InstFlags::STATIC_JUMP;
                        self.insts[jump_inst].set_static_jump_target(targets[0]);
                        trace!(%jump_inst, target_inst = %targets[0], "resolved jump");
                    } else {
                        self.insts[jump_inst].flags |=
                            InstFlags::STATIC_JUMP | InstFlags::MULTI_JUMP;
                        self.multi_jump_targets.insert(jump_inst, targets.clone());
                        trace!(%jump_inst, n_targets = targets.len(), "resolved multi-target jump");
                    }
                    newly_resolved += 1;
                }
                JumpTarget::Invalid => {
                    self.insts[jump_inst].flags |= InstFlags::STATIC_JUMP | InstFlags::INVALID_JUMP;
                    newly_resolved += 1;
                    trace!(%jump_inst, "resolved invalid jump");
                }
                JumpTarget::Bottom if !has_top_jump => {
                    // Truly unreachable: no unresolved jumps remain, so this
                    // code cannot be reached at runtime. Mark as invalid.
                    self.insts[jump_inst].flags |= InstFlags::STATIC_JUMP | InstFlags::INVALID_JUMP;
                    newly_resolved += 1;
                    trace!(%jump_inst, "unreachable jump");
                }
                JumpTarget::Bottom => {
                    // Unreachable according to the analysis, but there are
                    // unresolved (Top) jumps that might reach this code at
                    // runtime. Leave as-is.
                    trace!(%jump_inst, "unreachable jump (not marking, has_top_jump)");
                }
                JumpTarget::Top => {
                    trace!(%jump_inst, "unresolved jump (Top)");
                }
            }
        }
        newly_resolved
    }

    /// Rebuilds the basic-block CFG from the current instruction state.
    #[instrument(level = "debug", skip_all)]
    pub(crate) fn rebuild_cfg(&mut self) {
        let finish_block = |cfg: &mut Cfg, start: usize, end: usize| {
            debug_assert!(start < end, "empty block range: {start}..{end}");
            let bid = cfg.blocks.push(BlockData {
                insts: Inst::from_usize(start)..Inst::from_usize(end),
                preds: SmallVec::new(),
                succs: SmallVec::new(),
            });
            for i in start..end {
                cfg.inst_to_block[Inst::from_usize(i)] = Some(bid);
            }
        };

        let n = self.insts.len();
        assert_ne!(n, 0, "insts should never be empty");

        let cfg = &mut self.cfg;
        cfg.blocks.clear();
        cfg.inst_to_block.clear();

        // Identify block leaders.
        let mut is_leader: BitVec = BitVec::repeat(false, n);
        is_leader.set(0, true);

        for (i, inst) in self.insts.raw.iter().enumerate() {
            // Reachable JUMPDESTs must be leaders even when dead (e.g. deduped).
            // Without this, `pending_leader` in the block-building loop below never
            // fires for a dead JUMPDEST, and the preceding live block absorbs the
            // next alive instruction — potentially a diverging terminator that
            // poisons DSE.
            if inst.is_reachable_jumpdest(self.has_dynamic_jumps) {
                is_leader.set(i, true);
            }
            if inst.is_dead_code() {
                continue;
            }
            if (inst.is_branching() || inst.is_diverging()) && i + 1 < n {
                is_leader.set(i + 1, true);
            }
        }

        // Build blocks. Dead instructions are skipped; `current_end` tracks the
        // exclusive end of live instructions so block ranges never include dead code.
        cfg.inst_to_block.resize(n, None);
        let mut current_start = None;
        let mut current_end = 0;
        let mut pending_leader = false;

        for i in 0..n {
            if self.insts.raw[i].is_dead_code() {
                // Propagate leader marks past dead code so that the next alive
                // instruction starts a new block. Without this, a block ending
                // in JUMPI whose fall-through was deduped (dead) would merge
                // with the next alive instruction, potentially changing the
                // block's terminator and poisoning DSE liveness.
                pending_leader |= is_leader[i];
                continue;
            }

            if is_leader[i] || pending_leader || current_start.is_none() {
                if let Some(start) = current_start {
                    finish_block(cfg, start, current_end);
                }
                current_start = Some(i);
            }
            pending_leader = false;
            current_end = i + 1;
        }

        // Close the last block.
        if let Some(start) = current_start {
            finish_block(cfg, start, current_end);
        }

        // Build edges based on known control flow.
        for bid in cfg.blocks.indices() {
            let term = &self.insts[cfg.blocks[bid].terminator()];

            // Resolve a target instruction through redirects to a CFG block.
            let resolve = |target: Inst| -> Option<Block> {
                let target = self.redirects.get(&target).copied().unwrap_or(target);
                cfg.inst_to_block.get(target).copied().flatten()
            };

            // Fallthrough edge.
            if term.can_fall_through()
                && let Some(target_block) = resolve(cfg.blocks[bid].terminator() + 1)
            {
                cfg.blocks[target_block].preds.push(bid);
                cfg.blocks[bid].succs.push(target_block);
            }

            // Jump edges: static single-target, or multi-jump.
            let term_inst = cfg.blocks[bid].terminator();
            if term.flags.contains(InstFlags::MULTI_JUMP)
                && let Some(targets) = self.multi_jump_targets.get(&term_inst)
            {
                for &t in targets {
                    if let Some(target_block) = resolve(t)
                        && !cfg.blocks[bid].succs.contains(&target_block)
                    {
                        cfg.blocks[target_block].preds.push(bid);
                        cfg.blocks[bid].succs.push(target_block);
                    }
                }
            } else if term.is_static_jump()
                && !term.flags.contains(InstFlags::INVALID_JUMP)
                && let Some(target_block) = resolve(term.static_jump_target())
            {
                cfg.blocks[target_block].preds.push(bid);
                cfg.blocks[bid].succs.push(target_block);
            }
        }

        assert_ne!(cfg.blocks.len(), 0, "should always build at least one block");
    }

    /// Runs worklist-based abstract interpretation over the CFG.
    ///
    /// Returns a list of (jump_inst, resolved_target) pairs and the count of resolvable jumps.
    /// Stack snapshots are recorded into `self.snapshots` during the fixpoint.
    fn run_abstract_interp(
        &mut self,
        local_snapshots: &Snapshots,
    ) -> (Vec<(Inst, JumpTarget)>, usize) {
        let num_blocks = self.cfg.blocks.len();

        // Initialize block states. Entry block starts with an empty stack.
        let mut block_states: IndexVec<Block, BlockState> =
            index_vec![BlockState::Bottom; num_blocks];
        block_states[Block::from_usize(0)] = BlockState::Known(Vec::new());

        // Collect unresolved jumps.
        let mut jump_insts: Vec<Inst> = Vec::new();
        for (i, inst) in self.insts.iter_enumerated() {
            if inst.is_jump() && !inst.flags.contains(InstFlags::STATIC_JUMP) {
                jump_insts.push(i);
            }
        }

        let mut const_sets = ConstSetInterner::new();
        let (discovered_edges, converged) = self.run_fixpoint(&mut block_states, &mut const_sets);

        // On non-convergence, all fixpoint-derived snapshots are potentially stale.
        // Restore the safe block-local snapshots computed by `block_analysis_local`.
        if !converged {
            self.snapshots.restore_from(self.insts.indices(), local_snapshots);
        }

        // After convergence, resolve each dynamic jump from its snapshot operand.
        let mut jump_targets: Vec<(Inst, JumpTarget)> = Vec::new();
        let mut has_top_jump = false;
        for &jump_inst in &jump_insts {
            let target = match self.snapshots.inputs[jump_inst].last() {
                Some(&operand) => self.resolve_jump_operand(operand, &const_sets),
                None => {
                    // No snapshot means the block was never interpreted (unreachable).
                    trace!(%jump_inst, pc = self.pc(jump_inst), "jump in unreached block");
                    JumpTarget::Bottom
                }
            };
            if matches!(target, JumpTarget::Top) {
                has_top_jump = true;
            }
            jump_targets.push((jump_inst, target));
        }

        // Invalidate resolutions that may be unsound due to incomplete analysis.
        // When the fixpoint didn't converge, partially-discovered ConstSets may be
        // incomplete, so we must conservatively invalidate them too.
        if has_top_jump || !converged {
            self.invalidate_suspect_jumps(
                &mut jump_targets,
                &block_states,
                &discovered_edges,
                local_snapshots,
            );
        }

        let count = jump_targets
            .iter()
            .filter(|(_, t)| matches!(t, JumpTarget::Resolved(_) | JumpTarget::Invalid))
            .count();

        (jump_targets, count)
    }

    /// Resolves a jump target from the snapshot operand recorded during the fixpoint.
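    ///
    /// A `Const` operand resolves to a single target, a `ConstSet` to a multi-target
    /// resolution (or `Top` if the set mixes valid and invalid destinations), and a
    /// `Top` operand stays unresolved.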
    fn resolve_jump_operand(&self, operand: AbsValue, const_sets: &ConstSetInterner) -> JumpTarget {
        match operand {
            AbsValue::Const(imm) => {
                let val = imm.get(&self.u256_interner.borrow());
                match usize::try_from(val) {
                    Ok(target_pc) if self.is_valid_jump(target_pc) => {
                        JumpTarget::single(self.pc_to_inst(target_pc))
                    }
                    _ => JumpTarget::Invalid,
                }
            }
            AbsValue::ConstSet(set_idx) => {
                let consts = const_sets.get(set_idx);
                let interner = self.u256_interner.borrow();
                let mut targets = SmallVec::new();
                for &imm in consts {
                    let val = imm.get(&interner);
                    match usize::try_from(val) {
                        Ok(pc) if self.is_valid_jump(pc) => {
                            targets.push(self.pc_to_inst(pc));
                        }
                        _ => {
                            // Mixed valid + invalid: can't resolve since at runtime
                            // the value might be any member of the set.
                            return JumpTarget::Top;
                        }
                    }
                }
                if !targets.is_empty() {
                    JumpTarget::Resolved(targets)
                } else {
                    JumpTarget::Invalid
                }
            }
            AbsValue::Top => JumpTarget::Top,
        }
    }

    /// Adds discovered dynamic-jump target edges for a block.
    fn discover_jump_edges(
        &self,
        operand: AbsValue,
        bid: Block,
        const_sets: &ConstSetInterner,
        discovered: &mut IndexVec<Block, SmallVec<[Block; 4]>>,
        disc_preds: &mut IndexVec<Block, SmallVec<[Block; 4]>>,
    ) {
        let consts = match operand {
            AbsValue::Const(imm) => Either::Left(std::iter::once(imm)),
            AbsValue::ConstSet(set_idx) => Either::Right(const_sets.get(set_idx).iter().copied()),
            AbsValue::Top => return,
        };
        let interner = self.u256_interner.borrow();
        for imm in consts {
            let Ok(target_pc) = usize::try_from(imm.get(&interner)) else { continue };
            if !self.is_valid_jump(target_pc) {
                continue;
            }
            let ti = self.pc_to_inst(target_pc);
            if let Some(tb) = self.cfg.inst_to_block[ti]
                && !discovered[bid].contains(&tb)
            {
                discovered[bid].push(tb);
                disc_preds[tb].push(bid);
            }
        }
    }

    /// Invalidates jump resolutions and operand snapshots that may be unsound due to
    /// unresolved `Top` jumps.
    ///
    /// When unresolved dynamic jumps remain, any reachable `JUMPDEST` block could
    /// potentially be reached by those jumps with an arbitrary stack state. This means
    /// resolutions and snapshots derived from the fixpoint may be based on incomplete
    /// information.
    ///
    /// The conservative rule: seed every reachable `JUMPDEST` block as suspect,
    /// propagate forward through the CFG and discovered edges, and invalidate all
    /// resolved jumps and operand snapshots in suspect blocks.
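    ///
    /// E.g. a single opaque `MLOAD; JUMP` left at `Top` causes every reachable
    /// `JUMPDEST` block (and everything forward-reachable from one) to drop its
    /// fixpoint-derived results and fall back to block-local snapshots.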
    fn invalidate_suspect_jumps(
        &mut self,
        jump_targets: &mut [(Inst, JumpTarget)],
        block_states: &IndexVec<Block, BlockState>,
        discovered_edges: &IndexVec<Block, SmallVec<[Block; 4]>>,
        local_snapshots: &Snapshots,
    ) {
        let num_blocks = self.cfg.blocks.len();

        // Seed: every reachable JUMPDEST block is suspect when Top jumps exist.
        let mut suspect: BitVec = BitVec::repeat(false, num_blocks);
        for bid in self.cfg.blocks.indices() {
            if !matches!(block_states[bid], BlockState::Bottom)
                && self.insts[self.cfg.blocks[bid].insts.start].is_jumpdest()
            {
                suspect.set(bid.index(), true);
            }
        }

        // Propagate suspect flag forward through the CFG.
        let mut propagate = Worklist::new(num_blocks);
        for bid in self.cfg.blocks.indices() {
            if suspect[bid.index()] {
                propagate.push(bid);
            }
        }
        while let Some(bid) = propagate.pop() {
            for &succ in self.cfg.blocks[bid].succs.iter().chain(discovered_edges[bid].iter()) {
                let si = succ.index();
                if !suspect[si] {
                    suspect.set(si, true);
                    propagate.push(succ);
                }
            }
        }

        for (inst, target) in jump_targets.iter_mut() {
            if !matches!(target, JumpTarget::Resolved(_) | JumpTarget::Invalid) {
                continue;
            }
            if let Some(bid) = self.cfg.inst_to_block[*inst]
                && suspect[bid.index()]
            {
                *target = JumpTarget::Top;
            }
        }

        // Restore block-local snapshots in suspect blocks: the fixpoint may have recorded
        // precise values that are unsound because undiscovered edges (from Top jumps)
        // could bring in different stack states. However, block-local constants (computed
        // by block_analysis_local without depending on the incoming stack) are always valid.
        for bid in self.cfg.blocks.indices() {
            if !suspect[bid.index()] {
                continue;
            }
            self.snapshots.restore_from(self.cfg.blocks[bid].insts(), local_snapshots);
        }
    }

    /// Runs a worklist-based fixpoint to compute abstract block states.
    ///
    /// Returns `(discovered_edges, converged)`.
    fn run_fixpoint(
        &mut self,
        block_states: &mut IndexVec<Block, BlockState>,
        const_sets: &mut ConstSetInterner,
    ) -> (IndexVec<Block, SmallVec<[Block; 4]>>, bool) {
        let num_blocks = self.cfg.blocks.len();
        let mut worklist = Worklist::new(num_blocks);
        worklist.push(Block::from_usize(0));

        // Discovered dynamic-jump target edges per block.
        let mut discovered: IndexVec<Block, SmallVec<[Block; 4]>> =
            IndexVec::from_vec(vec![SmallVec::new(); num_blocks]);
        // Reverse map: discovered predecessors per block.
        let mut disc_preds: IndexVec<Block, SmallVec<[Block; 4]>> =
            IndexVec::from_vec(vec![SmallVec::new(); num_blocks]);

        let max_iterations = num_blocks * 8;
        let mut iterations = 0;
        let mut converged = true;

        // Reusable buffer to avoid per-iteration allocations.
        let mut stack_buf: Vec<AbsValue> = Vec::with_capacity(MAX_ABS_STACK_DEPTH);

        while let Some(bid) = worklist.pop() {
            iterations += 1;
            if iterations > max_iterations {
                converged = false;
                break;
            }

            // Copy input state into reusable buffer.
            stack_buf.clear();
            match &block_states[bid] {
                BlockState::Known(s) => stack_buf.extend_from_slice(s),
                BlockState::Bottom => continue,
            };

            let block = &self.cfg.blocks[bid];
            if !self.interpret_block(block.insts(), &mut stack_buf) {
                continue;
            }
            let block = &self.cfg.blocks[bid];

            // Discover dynamic-jump target edges from the snapshot recorded above.
            let term_inst = block.terminator();
            let term = &self.insts[term_inst];
            if term.is_jump()
                && !term.flags.contains(InstFlags::STATIC_JUMP)
                && let Some(&operand) = self.snapshots.inputs[term_inst].last()
            {
                self.discover_jump_edges(
                    operand,
                    bid,
                    const_sets,
                    &mut discovered,
                    &mut disc_preds,
                );
            }

            // Propagate to static CFG successors and discovered dynamic-jump targets.
            for &succ in block.succs.iter().chain(&discovered[bid]) {
                if block_states[succ].join(&stack_buf, const_sets) {
                    worklist.push(succ);
                }
            }
        }

        debug!(
            "{msg} after {iterations} iterations (max={max_iterations})",
            msg = if converged { "converged" } else { "did not converge" },
        );

        (discovered, converged)
    }

    /// Interprets a sequence of instructions on the abstract stack.
    /// Returns `false` on stack underflow (conflict).
    ///
    /// The caller must pre-fill `stack` with the input state; on return it contains the output.
    /// Records per-instruction operand snapshots into `self.snapshots`.
    fn interpret_block(
        &mut self,
        insts: impl IntoIterator<Item = Inst>,
        stack: &mut Vec<AbsValue>,
    ) -> bool {
        for i in insts {
            let inst = &self.insts[i];
            if inst.is_dead_code() {
                continue;
            }

            let (inp, out) = inst.stack_io();
            let inp = inp as usize;
            let out = out as usize;

            // Record pre-instruction input operand snapshot (in stack order, TOS last).
            if inp > 0 {
                let start = stack.len().saturating_sub(inp);
                let snap = &mut self.snapshots.inputs[i];
                snap.clear();
                snap.extend_from_slice(&stack[start..]);
            }

            match inst.opcode {
                op::PUSH0..=op::PUSH32 => {
                    stack.push(AbsValue::Const(inst.imm()));
                }
                op::POP => {
                    if stack.pop().is_none() {
                        return false;
                    }
                }
                op::DUP1..=op::DUP16 => {
                    let depth = (inst.opcode - op::DUP1 + 1) as usize;
                    if stack.len() < depth {
                        return false;
                    }
                    stack.push(stack[stack.len() - depth]);
                }
                op::SWAP1..=op::SWAP16 => {
                    let depth = (inst.opcode - op::SWAP1 + 1) as usize;
                    let len = stack.len();
                    if len < depth + 1 {
                        return false;
                    }
                    stack.swap(len - 1, len - 1 - depth);
                }
                op::DUPN => {
                    let depth = crate::decode_single(inst.imm_byte());
                    match depth {
                        Some(n) => {
                            let n = n as usize;
                            if stack.len() < n {
                                // Depth exceeds the tracked abstract stack (truncated by
                                // MAX_ABS_STACK_DEPTH). The slot is reachable at runtime
                                // but unknown abstractly.
                                stack.push(AbsValue::Top);
                            } else {
                                stack.push(stack[stack.len() - n]);
                            }
                        }
                        None => return false,
                    }
                }
                op::SWAPN => {
                    let depth = crate::decode_single(inst.imm_byte());
                    match depth {
                        Some(n) => {
                            let n = n as usize;
                            let len = stack.len();
                            if len < n + 1 {
                                // Deep slot beyond tracked abstract stack; TOS becomes Top
                                // and the deep slot (not tracked) is unchanged.
                                if let Some(tos) = stack.last_mut() {
                                    *tos = AbsValue::Top;
                                }
                            } else {
                                stack.swap(len - 1, len - 1 - n);
                            }
                        }
                        None => return false,
                    }
                }
                op::EXCHANGE => {
                    let pair = crate::decode_pair(inst.imm_byte());
                    match pair {
                        Some((n, m)) => {
                            let (n, m) = (n as usize, m as usize);
                            let len = stack.len();
                            if len < m + 1 {
                                // Deep slot beyond tracked abstract stack; the shallower
                                // slot (if tracked) becomes Top.
                                if len > n {
                                    stack[len - 1 - n] = AbsValue::Top;
                                }
                            } else {
                                stack.swap(len - 1 - n, len - 1 - m);
                            }
                        }
                        None => return false,
                    }
                }
                _ => {
                    if stack.len() < inp {
                        return false;
                    }

                    // Try constant folding for common arithmetic, respecting the gas budget.
                    let result = if out > 0 && self.compiler_gas_used < self.compiler_gas_limit {
                        let inputs_slice = &stack[stack.len() - inp..];
                        let mut interner = self.u256_interner.borrow_mut();

                        // Check gas cost before doing the actual fold.
                        let gas =
                            super::const_fold::const_fold_gas(inst.opcode, inputs_slice, &interner);
                        if let Some(cost) = gas
                            && self.compiler_gas_used.saturating_add(cost)
                                <= self.compiler_gas_limit
                        {
                            let folded = super::const_fold::try_const_fold(
                                inst,
                                inputs_slice,
                                &mut interner,
                                self.code.len(),
                            );
                            if folded.is_some() {
                                self.compiler_gas_used += cost;
                            }
                            folded
                        } else {
                            None
                        }
                    } else {
                        None
                    };

                    // Pop inputs.
                    stack.truncate(stack.len() - inp);

                    // Push outputs.
                    if let Some(folded) = result {
                        debug_assert_eq!(out, 1);
                        stack.push(folded);
                    } else {
                        stack.resize(stack.len() + out, AbsValue::Top);
                    }
                }
            }

            // Record post-instruction output snapshot.
            // Skip SWAP/SWAPN/EXCHANGE: they modify two positions and have no single "output".
            if out > 0
                && !matches!(inst.opcode, op::SWAP1..=op::SWAP16 | op::SWAPN | op::EXCHANGE)
            {
                self.snapshots.outputs[i] = stack.last().copied();
            }

            #[cfg(test)]
            if inst.opcode == crate::TEST_SUSPEND {
                stack.fill(AbsValue::Top);
            }
        }

        true
    }
}
1061
1062#[cfg(test)]
1063pub(crate) mod tests {
1064    use super::*;
1065    pub(crate) use crate::bytecode::Inst;
1066    pub(crate) use revm_primitives::{U256, hardfork::SpecId, hex};
1067
1068    pub(crate) fn analyze_hex(hex: &str) -> Bytecode<'static> {
1069        let code = hex::decode(hex.trim()).unwrap();
1070        analyze_code(code)
1071    }
1072
1073    pub(crate) fn analyze_code(code: Vec<u8>) -> Bytecode<'static> {
1074        analyze_code_with(code, Default::default())
1075    }
1076
1077    pub(crate) fn analyze_code_spec(code: Vec<u8>, spec_id: SpecId) -> Bytecode<'static> {
1078        crate::tests::init_tracing();
1079
1080        eprintln!("{}", hex::encode(&code));
1081        let mut bytecode = Bytecode::new(code, spec_id, None);
1082        bytecode.analyze().unwrap();
1083        eprintln!("{bytecode}");
1084        bytecode
1085    }
1086
1087    pub(crate) fn analyze_code_with(
1088        code: Vec<u8>,
1089        config: crate::bytecode::AnalysisConfig,
1090    ) -> Bytecode<'static> {
1091        crate::tests::init_tracing();
1092
1093        eprintln!("{}", hex::encode(&code));
1094        let mut bytecode = Bytecode::test(code);
1095        bytecode.config = config;
1096        bytecode.analyze().unwrap();
1097        eprintln!("{bytecode}");
1098        bytecode
1099    }
1100
1101    pub(crate) fn analyze_asm(src: &str) -> Bytecode<'static> {
1102        analyze_code(crate::parse_asm(src).unwrap())
1103    }
1104
1105    pub(crate) fn analyze_asm_spec(src: &str, spec_id: SpecId) -> Bytecode<'static> {
1106        analyze_code_spec(crate::parse_asm(src).unwrap(), spec_id)
1107    }
1108
1109    pub(crate) fn analyze_asm_with(
1110        src: &str,
1111        config: crate::bytecode::AnalysisConfig,
1112    ) -> Bytecode<'static> {
1113        analyze_code_with(crate::parse_asm(src).unwrap(), config)
1114    }
1115
1116    #[test]
1117    fn revert_sub_call_storage_oog() {
1118        let bytecode = analyze_hex(
1119            "60606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063b28175c4146046578063c0406226146052575b6000565b3460005760506076565b005b34600057605c6081565b604051808215151515815260200191505060405180910390f35b600c6000819055505b565b600060896076565b600d600181905550600e600281905550600190505b905600a165627a7a723058202a8a75d7d795b5bcb9042fb18b283daa90b999a11ddec892f5487322",
1120        );
1121        assert!(bytecode.has_dynamic_jumps());
1122    }
1123
1124    #[test]
1125    fn revert_remote_sub_call_storage_oog() {
1126        let bytecode = analyze_hex(
1127            "608060405234801561001057600080fd5b506004361061002b5760003560e01c806373027f6d14610030575b600080fd5b61004a600480360381019061004591906101a9565b61004c565b005b6000808273ffffffffffffffffffffffffffffffffffffffff166040516024016040516020818303038152906040527fb28175c4000000000000000000000000000000000000000000000000000000007bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19166020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050506040516100f69190610247565b6000604051808303816000865af19150503d8060008114610133576040519150601f19603f3d011682016040523d82523d6000602084013e610138565b606091505b509150915081600155505050565b600080fd5b600073ffffffffffffffffffffffffffffffffffffffff82169050919050565b60006101768261014b565b9050919050565b6101868161016b565b811461019157600080fd5b50565b6000813590506101a38161017d565b92915050565b6000602082840312156101bf576101be610146565b5b60006101cd84828501610194565b91505092915050565b600081519050919050565b600081905092915050565b60005b8381101561020a5780820151818401526020810190506101ef565b60008484015250505050565b6000610221826101d6565b61022b81856101e1565b935061023b8185602086016101ec565b80840191505092915050565b60006102538284610216565b91508190509291505056fea2646970667358221220b4673c55c7b0268d7d118059e6509196d2185bb7fe040a7d3900f902c8542ea464736f6c63430008180033",
1128        );
1129        assert!(bytecode.has_dynamic_jumps());
1130    }
1131
1132    #[test]
1133    fn trans_storage_ok() {
1134        let contracts = [
1135            "366012575b600b5f6020565b5f5260205ff35b601c6160a75f6024565b6004565b5c90565b5d56",
1136            "60106001600a5f6012565b015f6016565b005b5c90565b5d56",
1137            "3033146033575b303303600e57005b601b5f35806001555f608d565b5f80808080305af1600255602e60016089565b600355005b603a5f6089565b8015608757604a600182035f608d565b5f80808080305af1156083576001606191035f608d565b5f80808080305af115608357607f60016078816089565b016001608d565b6006565b5f80fd5b005b5c90565b5d56",
1138            "60065f601d565b5f5560106001601d565b6001555f80808080335af1005b5c9056",
1139        ];
1140        let has_dynamic = [false, false, true, false];
1141        for (hex, &expected) in contracts.iter().zip(&has_dynamic) {
1142            let bytecode = analyze_hex(hex);
1143            assert_eq!(bytecode.has_dynamic_jumps(), expected, "contract: {hex}");
1144        }
1145    }
1146
1147    #[test]
1148    fn const_operand_basic() {
1149        // inst 0: PUSH1 0x42 -> stack: [0x42]
1150        // inst 1: PUSH1 0x01 -> stack: [0x42, 0x01]
1151        // inst 2: ADD        -> stack: [0x43]  (const-folded)
1152        // inst 3: PUSH1 0x00 -> stack: [0x43, 0x00]
1153        // inst 4: MSTORE     -> pops 2
1154        // inst 5: JUMPDEST
1155        // inst 6: STOP
1156        let bytecode = analyze_asm(
1157            "
1158            PUSH1 0x42
1159            PUSH1 0x01
1160            ADD
1161            PUSH1 0x00
1162            MSTORE
1163            JUMPDEST
1164            STOP
1165        ",
1166        );
1167        // At inst 2 (ADD), operand 0 (TOS) = 0x01, operand 1 = 0x42.
1168        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 0), Some(U256::from(0x01)));
1169        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 1), Some(U256::from(0x42)));
1170        // At inst 4 (MSTORE), operand 0 (TOS) = 0x00, operand 1 = 0x43 (folded ADD result).
1171        assert_eq!(bytecode.const_operand(Inst::from_usize(4), 0), Some(U256::from(0x00)));
1172        assert_eq!(bytecode.const_operand(Inst::from_usize(4), 1), Some(U256::from(0x43)));
1173    }
1174
1175    #[test]
1176    fn const_operand_dynamic() {
1177        // CALLDATALOAD pushes an unknown value -> const_operand should return None.
1178        let bytecode = analyze_asm(
1179            "
1180            PUSH0
1181            CALLDATALOAD
1182            PUSH0
1183            MSTORE
1184            STOP
1185        ",
1186        );
1187        // At inst 3 (MSTORE), operand 0 (TOS) = 0x00, operand 1 = unknown (CALLDATALOAD result).
1188        assert_eq!(bytecode.const_operand(Inst::from_usize(3), 0), Some(U256::ZERO));
1189        assert_eq!(bytecode.const_operand(Inst::from_usize(3), 1), None);
1190    }
1191
1192    #[test]
1193    fn const_output_basic() {
1194        // PUSH1 0x42 -> output[0] = 0x42
1195        // PUSH1 0x01 -> output[0] = 0x01
1196        // ADD        -> output[0] = 0x43 (const-folded)
1197        // PUSH0      -> output[0] = 0x00
1198        // MSTORE     -> no outputs
1199        // STOP
1200        let bytecode = analyze_asm(
1201            "
1202            PUSH1 0x42
1203            PUSH1 0x01
1204            ADD
1205            PUSH0
1206            MSTORE
1207            STOP
1208        ",
1209        );
1210        assert_eq!(bytecode.const_output(Inst::from_usize(0)), Some(U256::from(0x42)));
1211        assert_eq!(bytecode.const_output(Inst::from_usize(1)), Some(U256::from(0x01)));
1212        assert_eq!(bytecode.const_output(Inst::from_usize(2)), Some(U256::from(0x43)));
1213        assert_eq!(bytecode.const_output(Inst::from_usize(3)), Some(U256::ZERO));
1214        // MSTORE has no outputs.
1215        assert_eq!(bytecode.const_output(Inst::from_usize(4)), None);
1216    }
1217
1218    #[test]
1219    fn const_output_dynamic() {
1220        // CALLDATALOAD pushes an unknown -> output should be None.
1221        let bytecode = analyze_asm(
1222            "
1223            PUSH0
1224            CALLDATALOAD
1225            STOP
1226        ",
1227        );
1228        assert_eq!(bytecode.const_output(Inst::from_usize(0)), Some(U256::ZERO));
1229        assert_eq!(bytecode.const_output(Inst::from_usize(1)), None);
1230    }
1231
1232    /// DUPN, SWAPN, and EXCHANGE should propagate constants like their fixed counterparts.
1233    #[test]
1234    fn const_snapshot_eof_stack_ops() {
1235        // DUPN/SWAPN min index is 17, so we need 17 values on the stack.
1236        // 16 × PUSH0, PUSH1 0xAA, DUPN 17 (raw=0x80), STOP.
1237        let mut code: Vec<u8> = vec![op::PUSH0; 16];
1238        code.extend([op::PUSH1, 0xAA]); // inst 16: TOS = 0xAA
1239        code.extend([op::DUPN, 0x80]); // inst 17: DUPN 17 (dup bottom = 0x00)
1240        code.push(op::STOP); // inst 18
1241        let bytecode = analyze_code_spec(code, SpecId::AMSTERDAM);
1242        // DUPN duplicates the 17th item (bottom PUSH0 = 0x00).
1243        assert_eq!(bytecode.const_output(Inst::from_usize(17)), Some(U256::ZERO));
1244        // The PUSH1 0xAA is still there.
1245        assert_eq!(bytecode.const_output(Inst::from_usize(16)), Some(U256::from(0xAA)));
1246    }
1247
    #[test]
    fn snapshots_all_blocks() {
        // Blocks with unresolvable dynamic jumps must still get snapshots from
        // block_analysis_local. Here CALLDATALOAD produces an opaque jump target,
        // so the JUMP stays dynamic, but the block's other instructions should
        // still have snapshots populated.
        let bytecode = analyze_asm(
            "
            PUSH1 0x42          ; inst 0
            PUSH1 0x01          ; inst 1
            ADD                 ; inst 2: 0x42 + 0x01 = 0x43
            PUSH0               ; inst 3
            CALLDATALOAD        ; inst 4: opaque value
            JUMP                ; inst 5: dynamic, unresolvable
        target1:
            JUMPDEST            ; inst 6
            PUSH1 0x01          ; inst 7
            ADD                 ; inst 8
            STOP                ; inst 9
        target2:
            JUMPDEST            ; inst 10
            PUSH1 0x02          ; inst 11
            SUB                 ; inst 12
            STOP                ; inst 13
        ",
        );
        // The JUMP at inst 5 should remain dynamic.
        assert!(bytecode.has_dynamic_jumps, "expected unresolved dynamic jump");
        let jump = bytecode.inst(Inst::from_usize(5));
        assert!(jump.is_jump());
        assert!(!jump.flags.contains(InstFlags::STATIC_JUMP));
        // JUMP's TOS operand is Top (CALLDATALOAD result).
        assert_eq!(bytecode.const_operand(Inst::from_usize(5), 0), None);

        // ADD 1
        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 0), Some(U256::from(0x01)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 1), Some(U256::from(0x42)));
        assert_eq!(bytecode.const_output(Inst::from_usize(2)), Some(U256::from(0x43)));

        // ADD 2
        assert_eq!(bytecode.const_operand(Inst::from_usize(8), 0), Some(U256::from(0x01)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(8), 1), None);
        assert_eq!(bytecode.const_output(Inst::from_usize(8)), None);

        // SUB
        assert_eq!(bytecode.const_operand(Inst::from_usize(12), 0), Some(U256::from(0x02)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(12), 1), None);
        assert_eq!(bytecode.const_output(Inst::from_usize(12)), None);
    }

    #[test]
    fn multi_target_jump() {
        // Internal function called from two sites with different return addresses.
        // The return JUMP at the end should resolve to Multi([ret1, ret2]).
        let bytecode = analyze_asm(
            "
            ; Call site 1.
            PUSH %ret1          ; push return address
            PUSH %func          ; push function entry
            JUMP                ; call function (static: PUSH+JUMP)
        ret1:
            JUMPDEST
            POP                 ; consume function result
            ; Call site 2.
            PUSH %ret2          ; push return address
            PUSH %func          ; push function entry
            JUMP                ; call function (static: PUSH+JUMP)
        ret2:
            JUMPDEST
            POP                 ; consume function result
            STOP
            ; Internal function.
        func:
            JUMPDEST
            PUSH1 0x42          ; push a result
            SWAP1               ; swap result and return address
            JUMP                ; return (dynamic)
        ",
        );

        // The function's return JUMP (the program's final instruction) should be multi-target.
        let return_jump = bytecode
            .iter_insts()
            .find(|(_, d)| d.is_jump() && d.flags.contains(InstFlags::MULTI_JUMP));
        assert!(return_jump.is_some(), "expected a multi-target jump");
        let (rj_inst, _) = return_jump.unwrap();
        let targets = bytecode.multi_jump_targets(rj_inst).unwrap();
        assert_eq!(targets.len(), 2, "expected 2 targets, got {}", targets.len());
        // Verify both targets are JUMPDESTs.
        for &t in targets {
            assert_eq!(bytecode.inst(t).opcode, op::JUMPDEST);
        }
        // No dynamic jumps should remain.
        assert!(!bytecode.has_dynamic_jumps, "expected no dynamic jumps");
    }
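
    /// Hedged illustration, not the pass's real code: a local model of the
    /// per-value join exercised by the test above. Joining two distinct known
    /// constants (the two return addresses) yields a sorted, deduplicated
    /// constant set rather than widening straight to Top, which is what lets
    /// the return JUMP resolve to `Multi` instead of staying dynamic. The real
    /// pass interns sets as `ConstSet(ConstSetIdx)`; this model inlines them.
    #[test]
    fn sketch_const_join_model() {
        #[derive(Clone, Debug, PartialEq)]
        enum V {
            Const(u64),
            ConstSet(Vec<u64>), // sorted, deduplicated
            Top,
        }

        fn join(a: &V, b: &V) -> V {
            // The known constants of a value; `None` models Top.
            fn consts(v: &V) -> Option<Vec<u64>> {
                match v {
                    V::Const(c) => Some(vec![*c]),
                    V::ConstSet(s) => Some(s.clone()),
                    V::Top => None,
                }
            }
            match (consts(a), consts(b)) {
                (Some(mut xs), Some(ys)) => {
                    xs.extend(ys);
                    xs.sort_unstable();
                    xs.dedup();
                    if xs.len() == 1 { V::Const(xs[0]) } else { V::ConstSet(xs) }
                }
                // Any Top side makes the join Top.
                _ => V::Top,
            }
        }

        let (ret1, ret2) = (V::Const(5), V::Const(12));
        assert_eq!(join(&ret1, &ret2), V::ConstSet(vec![5, 12]));
        assert_eq!(join(&ret1, &ret1), V::Const(5));
        assert_eq!(join(&ret1, &V::Top), V::Top);
    }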

    /// Regression test for a formerly unsound resolution in `invalidate_suspect_jumps`.
    ///
    /// The invalidation pass originally seeded suspicion only from `Bottom + JUMPDEST`
    /// predecessors, but an unresolved dynamic jump can also reach a `Known` JUMPDEST block
    /// at runtime with a different stack than the fixpoint computed. This test constructs a
    /// case where that seeding would incorrectly resolve a return JUMP to `Multi` even
    /// though an opaque (MLOAD-based) dynamic jump could reach the same function entry with
    /// an arbitrary return address.
    ///
    /// Layout:
    ///   Call site 1: PUSH ret1, PUSH fn, JUMP → fn returns to ret1
    ///   Call site 2: PUSH ret2, PUSH fn, JUMP → fn returns to ret2
    ///   Opaque jump: PUSH0, MLOAD, JUMP      → Top target, could be fn at runtime
    ///   fn:          JUMPDEST, PUSH 0x42, SWAP1, JUMP → return (should be Top)
    ///
    /// The fn JUMPDEST block is `Known([ConstSet({ret1, ret2})])` from the two static
    /// callers, so the return JUMP would resolve to `Multi([ret1, ret2])`. But the opaque
    /// jump could call fn with any return address, making that resolution unsound.
    /// `invalidate_suspect_jumps` now seeds every reachable JUMPDEST block as suspect,
    /// so the return jump is correctly invalidated.
    #[test]
    fn known_jumpdest_unsound_resolution() {
        let bytecode = analyze_asm(
            "
            ; Call site 1.
            PUSH %ret1              ; pc=0
            PUSH %fn_entry          ; pc=2
            JUMP                    ; pc=4: -> fn_entry
        ret1:
            JUMPDEST                ; pc=5
            POP                     ; pc=6: drop result
            ; Call site 2.
            PUSH %ret2              ; pc=7
            PUSH %fn_entry          ; pc=9
            JUMP                    ; pc=11: -> fn_entry
        ret2:
            JUMPDEST                ; pc=12
            POP                     ; pc=13: drop result
            ; Opaque jump: loads target from memory (Top).
            PUSH0                   ; pc=14
            MLOAD                   ; pc=15: -> Top
            JUMP                    ; pc=16: dynamic, target = Top
            INVALID                 ; pc=17
            INVALID                 ; pc=18
            INVALID                 ; pc=19
            ; Internal function: pushes result, swaps with return addr, jumps back.
        fn_entry:
            JUMPDEST                ; pc=20
            PUSH1 0x42              ; pc=21: result
            SWAP1                   ; pc=23: swap result and return addr
            JUMP                    ; pc=24: return (must not resolve to Multi)
        ",
        );

        // The return JUMP at pc=24 must NOT be resolved — the opaque jump at
        // pc=16 can reach fn_entry with any return address. The invalidation
        // pass catches this by seeding every reachable JUMPDEST block as suspect.
        let return_jump = bytecode.iter_insts().find(|(i, d)| bytecode.pc(*i) == 24 && d.is_jump());
        let (_, rj) = return_jump.unwrap();
        assert!(
            !rj.flags.contains(InstFlags::STATIC_JUMP),
            "unsound: return jump should not be resolved"
        );
    }
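
    /// Hedged illustration, not the pass's real code: a minimal model of the
    /// suspect seeding described above. When an unresolved Top jump remains,
    /// every reachable JUMPDEST block is seeded as suspect (not only `Bottom`
    /// ones), since the opaque jump could enter any of them at runtime;
    /// suspicion then propagates forward along CFG edges. The block shapes and
    /// edges here are made up for the example.
    #[test]
    fn sketch_suspect_seeding_model() {
        // Four blocks: 1 and 2 start with a JUMPDEST, 3 is a plain successor.
        let is_jumpdest = [false, true, true, false];
        let reachable = [true, true, true, true];
        let succs: [&[usize]; 4] = [&[1], &[3], &[3], &[]];

        let has_top_jump = true; // e.g. the MLOAD-based JUMP above
        let mut suspect = [false; 4];
        if has_top_jump {
            // Seed every reachable JUMPDEST block, regardless of lattice state.
            for b in 0..4 {
                suspect[b] = reachable[b] && is_jumpdest[b];
            }
        }
        // Propagate suspicion forward to a fixpoint.
        let mut changed = true;
        while changed {
            changed = false;
            for b in 0..4 {
                if !suspect[b] {
                    continue;
                }
                for &s in succs[b] {
                    if !suspect[s] {
                        suspect[s] = true;
                        changed = true;
                    }
                }
            }
        }
        // Block 3, a non-JUMPDEST fallthrough, becomes suspect via propagation.
        assert_eq!(suspect, [false, true, true, true]);
    }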

    #[test]
    fn nested_call_return() {
        // Two-level call chain: outer calls wrapper, wrapper calls inner function.
        // Direct caller also calls the inner function directly.
        //
        //   call site 1 (direct):   PUSH ret1, PUSH inner, JUMP
        //   call site 2 (wrapper):  PUSH ret2, PUSH 0x42, PUSH wrapper, JUMP
        //     wrapper:              PUSH ret_w, PUSH inner, JUMP
        //     ret_w:                POP POP JUMP  (returns to ret2)
        //
        // The inner function is called with different stack depths from the two
        // sites. Its return JUMP resolves to Multi([ret1, ret_w]). But the
        // wrapper's final JUMP (at ret_w) must recover the outer return address
        // from deep in the stack — which the analysis currently cannot resolve
        // because the top-aligned join pads it to Top.
        let bytecode = analyze_asm(
            "
            ; Call site 1: direct call to inner.
            PUSH %ret1              ; pc=0
            PUSH %inner             ; pc=2
            JUMP                    ; pc=4: -> inner
        ret1:
            JUMPDEST                ; pc=5
            POP                     ; pc=6: drop result
            ; Call site 2: call through wrapper.
            PUSH %ret2              ; pc=7
            PUSH1 0x42              ; pc=9: an argument
            PUSH %wrapper           ; pc=11
            JUMP                    ; pc=13: -> wrapper
        ret2:
            JUMPDEST                ; pc=14
            POP                     ; pc=15: drop result
            POP                     ; pc=16: drop arg
            STOP                    ; pc=17
            INVALID                 ; pc=18
            ; Wrapper function: calls inner, then returns to caller.
            ; Entry stack: [outer_ret, arg]
        wrapper:
            JUMPDEST                ; pc=19
            PUSH %ret_w             ; pc=20: push return addr for inner
            PUSH %inner             ; pc=22: push inner entry
            JUMP                    ; pc=24: -> inner
            INVALID                 ; pc=25
            INVALID                 ; pc=26
        ret_w:
            JUMPDEST                ; pc=27: inner returned here
            ; Stack: [outer_ret, arg, inner_result]
            POP                     ; pc=28: drop inner_result
            POP                     ; pc=29: drop arg
            JUMP                    ; pc=30: return to caller via outer_ret (dynamic)
            INVALID                 ; pc=31
            ; Inner function: pushes a result and returns.
            ; Entry stack: [..., ret_addr]
        inner:
            JUMPDEST                ; pc=32
            PUSH1 0x42              ; pc=33: push result
            SWAP1                   ; pc=35
            JUMP                    ; pc=36: jump to ret_addr
        ",
        );

        // The wrapper return JUMP (pc=30) remains dynamic because the outer
        // return address is lost to Top during the top-aligned join.
        // Because an unresolved Top jump exists, the conservative invalidation
        // also invalidates the inner return JUMP — any reachable JUMPDEST
        // (including inner's entry) is suspect.
        assert!(bytecode.has_dynamic_jumps, "expected dynamic jumps to remain");
    }
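
    /// Hedged illustration, not the pass's real join: a local model of the
    /// top-aligned join that loses the wrapper's outer return address above.
    /// Stacks are stored bottom-to-top; predecessors of different heights are
    /// aligned at the top, and the shorter one is bottom-padded with Top.
    /// Unequal constants widen straight to Top here to keep the model small;
    /// the real pass would first form a ConstSet.
    #[test]
    fn sketch_top_aligned_join_model() {
        #[derive(Clone, Copy, Debug, PartialEq)]
        enum Abs {
            Const(u64),
            Top,
        }

        fn join_top_aligned(a: &[Abs], b: &[Abs]) -> Vec<Abs> {
            let len = a.len().max(b.len());
            (0..len)
                .map(|i| {
                    let depth = len - 1 - i; // depth below TOS
                    let get = |s: &[Abs]| {
                        if depth < s.len() { s[s.len() - 1 - depth] } else { Abs::Top }
                    };
                    match (get(a), get(b)) {
                        (x, y) if x == y => x,
                        _ => Abs::Top,
                    }
                })
                .collect()
        }

        // inner's entry as seen from the two call sites:
        let direct = [Abs::Const(5)]; // [ret1]
        let wrapped = [Abs::Const(14), Abs::Const(0x42), Abs::Const(27)]; // [outer_ret, arg, ret_w]
        // The slot holding outer_ret is below the shorter stack's height, so
        // the join pads it to Top and the wrapper's return JUMP cannot resolve.
        assert_eq!(join_top_aligned(&direct, &wrapped), [Abs::Top, Abs::Top, Abs::Top]);
    }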

    /// Regression test: deep DUPN on Amsterdam must not cause the abstract interpreter to
    /// abort a block and leave a reachable JUMP as Bottom → INVALID_JUMP.
    #[test]
    fn amsterdam_deep_dupn_jump_not_invalid() {
        // Build a stack with 70 items, where the deepest is the final JUMP target.
        // A dispatcher JUMP is resolvable; a later JUMP in a block that uses DUPN 70
        // must also be correctly resolved (not marked INVALID_JUMP).
        let code = crate::parse_asm(
            "
            PUSH %target
            ; 69 × PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0 PUSH0
            PUSH %resolved
            PUSH %dispatcher
            JUMP
        dispatcher:
            JUMPDEST
            JUMP
        resolved:
            JUMPDEST
            DUPN 70
            POP
        problem:
            JUMPDEST
            DUPN 70
            JUMP
        target:
            JUMPDEST
            STOP
        ",
        )
        .unwrap();
        let bytecode = analyze_code_spec(code, SpecId::AMSTERDAM);

        // The "problem" JUMP must NOT be marked INVALID_JUMP.
        let jumps: Vec<_> = bytecode.iter_insts().filter(|(_, d)| d.opcode == op::JUMP).collect();
        for &(inst, data) in &jumps {
            assert!(
                !data.flags.contains(InstFlags::INVALID_JUMP),
                "JUMP inst={inst} pc={} incorrectly marked INVALID_JUMP",
                bytecode.pc(inst),
            );
        }
    }
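
    /// Hedged illustration, not the pass's real code: one plausible way a deep
    /// DUPN can degrade gracefully. Reading past the tracked abstract stack
    /// yields Top instead of aborting the block, so successors still get a
    /// `Known` state and reachable JUMPs are not left at Bottom → INVALID_JUMP.
    /// Whether the real interpreter pads exactly like this is an assumption.
    #[test]
    fn sketch_deep_dupn_degrades_to_top() {
        #[derive(Clone, Copy, Debug, PartialEq)]
        enum Abs {
            Const(u64),
            Top,
        }

        // DUPn on an abstract stack: out-of-range depths read as Top.
        fn dupn(stack: &mut Vec<Abs>, n: usize) {
            let v = if n <= stack.len() { stack[stack.len() - n] } else { Abs::Top };
            stack.push(v);
        }

        let mut stack = vec![Abs::Const(1), Abs::Const(2)];
        dupn(&mut stack, 70); // deeper than anything tracked
        assert_eq!(stack.last(), Some(&Abs::Top));
        assert_eq!(stack.len(), 3); // interpretation continues past the DUPN
    }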

    #[test]
    fn hash_10k() {
        let code = revm_primitives::hex::decode(
            include_str!("../../../../../data/hash_10k.rt.hex").trim(),
        )
        .unwrap();
        let mut bytecode = Bytecode::test(code);
        bytecode.analyze().unwrap();
        assert!(!bytecode.has_dynamic_jumps, "expected all jumps to be resolved");
    }
}

#[cfg(test)]
mod tests_edge_cases {
    use super::{tests::*, *};

    /// Three callers to the same internal function. The return JUMP should
    /// resolve to Multi with three targets.
    #[test]
    fn multi_target_jump_three_callers() {
        let bytecode = analyze_asm(
            "
            ; Call site 1.
            PUSH %ret1
            PUSH %func
            JUMP
        ret1:
            JUMPDEST
            POP
            ; Call site 2.
            PUSH %ret2
            PUSH %func
            JUMP
        ret2:
            JUMPDEST
            POP
            ; Call site 3.
            PUSH %ret3
            PUSH %func
            JUMP
        ret3:
            JUMPDEST
            POP
            STOP
            ; Internal function.
        func:
            JUMPDEST
            PUSH1 0x42
            SWAP1
            JUMP
        ",
        );

        let return_jump = bytecode
            .iter_insts()
            .find(|(_, d)| d.is_jump() && d.flags.contains(InstFlags::MULTI_JUMP));
        assert!(return_jump.is_some(), "expected a multi-target jump");
        let (rj_inst, _) = return_jump.unwrap();
        let targets = bytecode.multi_jump_targets(rj_inst).unwrap();
        assert_eq!(targets.len(), 3, "expected 3 targets, got {}", targets.len());
        assert!(!bytecode.has_dynamic_jumps, "expected no dynamic jumps");
    }

    /// DUP and SWAP should preserve constant values through the stack.
    #[test]
    fn dup_swap_const_propagation() {
        // PUSH1 0xAA -> [0xAA]
        // PUSH1 0xBB -> [0xAA, 0xBB]
        // DUP2       -> [0xAA, 0xBB, 0xAA]
        // SWAP1      -> [0xAA, 0xAA, 0xBB]
        // PUSH0      -> [0xAA, 0xAA, 0xBB, 0x00]
        // MSTORE     -> pops (0x00, 0xBB), leaves [0xAA, 0xAA]
        // STOP
        let bytecode = analyze_asm(
            "
            PUSH1 0xAA          ; inst 0
            PUSH1 0xBB          ; inst 1
            DUP2                ; inst 2
            SWAP1               ; inst 3
            PUSH0               ; inst 4
            MSTORE              ; inst 5
            STOP                ; inst 6
        ",
        );
        // DUP2 output should be 0xAA.
        assert_eq!(bytecode.const_output(Inst::from_usize(2)), Some(U256::from(0xAA)));
        // At MSTORE (inst 5): TOS = 0x00, second = 0xBB (swapped back).
        assert_eq!(bytecode.const_operand(Inst::from_usize(5), 0), Some(U256::ZERO));
        assert_eq!(bytecode.const_operand(Inst::from_usize(5), 1), Some(U256::from(0xBB)));
    }
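
    /// Hedged illustration, not the pass's real transfer functions: DUPn and
    /// SWAPn are pure stack shuffles in the abstract domain, so constants flow
    /// through them unchanged. This mirrors the DUP2/SWAP1 sequence above,
    /// with an extra unknown value at the bottom to show that shuffles never
    /// disturb untouched slots.
    #[test]
    fn sketch_dup_swap_transfer_model() {
        #[derive(Clone, Copy, Debug, PartialEq)]
        enum Abs {
            Const(u64),
            Top,
        }

        // DUPn: push a copy of the n-th item from the top (1-based).
        fn dup(stack: &mut Vec<Abs>, n: usize) {
            let v = stack[stack.len() - n];
            stack.push(v);
        }
        // SWAPn: exchange TOS with the item n slots below it.
        fn swap(stack: &mut Vec<Abs>, n: usize) {
            let top = stack.len() - 1;
            stack.swap(top, top - n);
        }

        let mut stack = vec![Abs::Top, Abs::Const(0xAA), Abs::Const(0xBB)];
        dup(&mut stack, 2); // DUP2 copies 0xAA
        assert_eq!(stack, [Abs::Top, Abs::Const(0xAA), Abs::Const(0xBB), Abs::Const(0xAA)]);
        swap(&mut stack, 1); // SWAP1
        assert_eq!(stack, [Abs::Top, Abs::Const(0xAA), Abs::Const(0xAA), Abs::Const(0xBB)]);
    }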

    /// JUMPI with a constant condition and a constant target should resolve as static.
    #[test]
    fn jumpi_const_condition_snapshot() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x01              ; inst 0: condition = 1 (always taken)
            PUSH %target            ; inst 1: target
            JUMPI                   ; inst 2: static JUMPI
            STOP                    ; inst 3: fallthrough
        target:
            JUMPDEST                ; inst 4: target
            STOP                    ; inst 5
        ",
        );
        // The JUMPI should be resolved as static.
        assert!(!bytecode.has_dynamic_jumps);
    }

    /// A loop where the stack grows each iteration until clamped.
    /// Verifies the analysis converges without losing precision at the top.
    #[test]
    fn loop_stack_growth_clamping() {
        // bb0: PUSH1 0x42, PUSH1 0x01, then fall through to bb1.
        // bb1: JUMPDEST, PUSH1 0x01, PUSH %loop_header, JUMP  (loop back, growing stack).
        //
        // The stack grows by 1 each iteration (net +1 from PUSH before the loop jump).
        // The abstract stack should clamp to MAX_ABS_STACK_DEPTH without conflict.
        let bytecode = analyze_asm(
            "
            PUSH1 0x42              ; pc=0: initial value
            PUSH1 0x01              ; pc=2: another
        loop_header:
            JUMPDEST                ; loop header (bb1)
            PUSH1 0x01              ; push each iteration
            PUSH %loop_header       ; loop target
            JUMP                    ; back-edge
        ",
        );
        // Should converge without panicking.
        // The JUMP should be static (resolved by adjacent PUSH+JUMP).
        assert!(!bytecode.has_dynamic_jumps);
    }
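
    /// Hedged illustration, not the pass's real code: a local model of stack
    /// clamping with a made-up cap (the real bound is MAX_ABS_STACK_DEPTH, and
    /// exactly how the deepest slots are discarded is an assumption). Clamping
    /// keeps the abstract stack finite so a net-growing loop still reaches a
    /// fixpoint.
    #[test]
    fn sketch_stack_clamp_model() {
        const MAX_DEPTH: usize = 8; // hypothetical cap for the model

        fn clamp(stack: &mut Vec<u32>) {
            if stack.len() > MAX_DEPTH {
                // Keep the top MAX_DEPTH slots; drop the deepest ones.
                let excess = stack.len() - MAX_DEPTH;
                stack.drain(..excess);
            }
        }

        let mut stack: Vec<u32> = Vec::new();
        for i in 0..20 {
            stack.push(i); // net +1 per iteration, like the loop above
            clamp(&mut stack);
        }
        assert_eq!(stack.len(), MAX_DEPTH);
        // Only the most recent (top-most) values survive.
        assert_eq!(stack, (12..20).collect::<Vec<u32>>());
    }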

    /// A single-instruction block (just JUMPDEST as the terminator) used as a
    /// jump target. This tests the edge case where block.len() == 1.
    #[test]
    fn single_inst_block_jump_target() {
        let bytecode = analyze_asm(
            "
            PUSH %target            ; pc=0
            JUMP                    ; static jump
            INVALID
            INVALID
        target:
            JUMPDEST                ; single-inst block
            STOP
        ",
        );
        assert!(!bytecode.has_dynamic_jumps);
    }

    /// A JUMP with an invalid target (not a JUMPDEST) should be marked invalid.
    #[test]
    fn invalid_jump_target() {
        // The caller pushes a constant (0xFF) that is not a valid JUMPDEST pc,
        // then calls a function that jumps to that caller-provided target.
        let bytecode = analyze_asm(
            "
            PUSH1 0xFF              ; pc=0: push invalid target
            PUSH %func              ; pc=2: push function entry
            JUMP                    ; pc=4: -> func
            ; Function: jump to the caller-provided target.
        func:
            JUMPDEST
            JUMP                    ; jump to 0xFF (invalid)
        ",
        );
        // The dynamic JUMP at pc=6 should be resolved as invalid.
        let jump_inst = bytecode
            .iter_insts()
            .find(|(_, d)| d.is_jump() && d.flags.contains(InstFlags::INVALID_JUMP));
        assert!(jump_inst.is_some(), "expected an invalid jump");
    }

    /// Constant propagation through a diamond CFG (if-then-else merge).
    /// The same constant is live across both branches, so the merge should preserve it.
    #[test]
    fn diamond_cfg_same_const() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x42              ; pushed once, live across both paths
            PUSH0                   ; condition (always false)
            PUSH %then_pc           ; then target
            JUMPI                   ; branch
            ; Else path: jump straight to merge.
            PUSH %merge
            JUMP                    ; -> merge
            ; Then:
        then_pc:
            JUMPDEST
            PUSH %merge
            JUMP                    ; -> merge
            ; Merge:
        merge:
            JUMPDEST
            PUSH0
            MSTORE                  ; MSTORE(0, 0x42)
            STOP
        ",
        );
        // At the merge MSTORE, the value (operand 1) should still be 0x42
        // since both branches had the same constant on the stack.
        let mstore = bytecode.iter_insts().find(|(_, d)| d.opcode == op::MSTORE).unwrap().0;
        assert_eq!(bytecode.const_operand(mstore, 1), Some(U256::from(0x42)));
    }

    /// Diamond CFG where branches push different constants — the merge should
    /// yield None (Top) for const_operand.
    #[test]
    fn diamond_cfg_different_const() {
        let bytecode = analyze_asm(
            "
            PUSH0                   ; condition
            PUSH %then_pc
            JUMPI                   ; branch
            ; Else: push 0xAA.
            PUSH1 0xAA
            PUSH %merge
            JUMP                    ; -> merge
            INVALID
            ; Then: push 0xBB.
        then_pc:
            JUMPDEST
            PUSH1 0xBB
            PUSH %merge
            JUMP                    ; -> merge
            ; Merge:
        merge:
            JUMPDEST
            PUSH0
            MSTORE                  ; MSTORE(0, ???)
            STOP
        ",
        );
        // At the merge MSTORE, the value (operand 1) should be None
        // since the two branches push different constants.
        let mstore = bytecode.iter_insts().find(|(_, d)| d.opcode == op::MSTORE).unwrap().0;
        assert_eq!(bytecode.const_operand(mstore, 1), None);
    }
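
    /// Hedged illustration, not the pass's real query: after the diamond merge
    /// above, a value may carry several candidate constants (still useful for
    /// jump resolution), but a "single known constant" query in the style of
    /// `const_operand` must answer None for anything that is not a singleton.
    #[test]
    fn sketch_singleton_const_query_model() {
        fn single_const(candidates: &[u64]) -> Option<u64> {
            match candidates {
                [only] => Some(*only),
                _ => None, // empty (unknown) or multiple candidates
            }
        }
        assert_eq!(single_const(&[0x42]), Some(0x42));
        assert_eq!(single_const(&[0xAA, 0xBB]), None); // the merge above
        assert_eq!(single_const(&[]), None);
    }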

    /// A private-return trampoline (`JUMPDEST; JUMP`) reached by an opaque jump
    /// can forward arbitrary stacks into a real function entry. The trampoline's
    /// Top jump must not be filtered out by the `is_private_return` check.
    #[test]
    fn trampoline_private_return_unsound() {
        let bytecode = analyze_asm(
            "
            ; Entry: branch to opaque path or call site 1.
            PUSH0                       ; pc=0
            CALLDATALOAD                ; pc=1
            PUSH %opaque_path           ; pc=2
            JUMPI                       ; pc=4
            ; Fallthrough: call site 1.
            PUSH %ret1                  ; pc=5
            PUSH %fn_entry              ; pc=7
            JUMP                        ; pc=9
        ret1:
            JUMPDEST                    ; pc=10
            POP                         ; pc=11
            ; Call site 2.
            PUSH %ret2                  ; pc=12
            PUSH %fn_entry              ; pc=14
            JUMP                        ; pc=16
        ret2:
            JUMPDEST                    ; pc=17
            POP                         ; pc=18
            STOP                        ; pc=19
            ; Opaque path (reachable from JUMPI).
        opaque_path:
            JUMPDEST                    ; pc=20
            PUSH0                       ; pc=21
            MLOAD                       ; pc=22: any_ret (Top)
            PUSH %tramp                 ; pc=23
            JUMP                        ; pc=25
            ; Trampoline: bare JUMPDEST + JUMP (is_private_return = true).
        tramp:
            JUMPDEST                    ; pc=26
            JUMP                        ; pc=27: target = any_ret (Top)
            ; Internal function.
        fn_entry:
            JUMPDEST                    ; pc=28
            PUSH1 0x42                  ; pc=29
            SWAP1                       ; pc=31
            JUMP                        ; pc=32: return
        ",
        );

        // The fn return JUMP at pc=32 must NOT be resolved — the trampoline
        // can reach fn_entry with any return address.
        let return_jump = bytecode.iter_insts().find(|(i, d)| bytecode.pc(*i) == 32 && d.is_jump());
        let (_, rj) = return_jump.unwrap();
        assert!(
            !rj.flags.contains(InstFlags::STATIC_JUMP),
            "unsound: return jump should not be resolved when trampoline exists"
        );
    }

    /// A JUMPI-based return where the target comes from the entry stack should
    /// be invalidated when a Top jump exists, just like JUMP-based returns.
    #[test]
    fn jumpi_return_unsound() {
        let bytecode = analyze_asm(
            "
            ; Call site 1.
            PUSH %ret1              ; pc=0
            PUSH %fn_entry          ; pc=2
            JUMP                    ; pc=4
        ret1:
            JUMPDEST                ; pc=5
            POP                     ; pc=6
            ; Call site 2.
            PUSH %ret2              ; pc=7
            PUSH %fn_entry          ; pc=9
            JUMP                    ; pc=11
        ret2:
            JUMPDEST                ; pc=12
            POP                     ; pc=13
            ; Opaque jump.
            PUSH0                   ; pc=14
            MLOAD                   ; pc=15
            JUMP                    ; pc=16: Top
            INVALID                 ; pc=17
            INVALID                 ; pc=18
            INVALID                 ; pc=19
            ; Internal function using JUMPI to return.
        fn_entry:
            JUMPDEST                ; pc=20
            PUSH1 0x42              ; pc=21: result
            SWAP1                   ; pc=23: [result, ret_addr]
            PUSH1 0x01              ; pc=24: condition (always true)
            SWAP1                   ; pc=26: [result, 1, ret_addr]
            JUMPI                   ; pc=27: conditional return (always taken)
            STOP                    ; pc=28: fallthrough (dead)
        ",
        );

        // The JUMPI at pc=27 should NOT be resolved — the opaque jump can
        // reach fn_entry with any return address.
        let return_jumpi =
            bytecode.iter_insts().find(|(i, d)| bytecode.pc(*i) == 27 && d.is_jump());
        let (_, rj) = return_jumpi.unwrap();
        assert!(
            !rj.flags.contains(InstFlags::STATIC_JUMP),
            "unsound: JUMPI return should not be resolved"
        );
    }

    /// A third caller reaches the function entry with a known callee (static
    /// PUSH+JUMP) but an opaque return address (from MLOAD). The function's
    /// return jump must not be resolved because the return address is Top.
    #[test]
    fn opaque_return_addr_caller_unsound() {
        let bytecode = analyze_asm(
            "
            ; Entry: branch to opaque caller or fallthrough.
            PUSH0                       ; pc=0
            CALLDATALOAD                ; pc=1
            PUSH %opaque_caller         ; pc=2
            JUMPI                       ; pc=4
            ; Fallthrough: call site 1.
            PUSH %ret1                  ; pc=5
            PUSH %fn_entry              ; pc=7
            JUMP                        ; pc=9
        ret1:
            JUMPDEST                    ; pc=10
            POP                         ; pc=11
            ; Call site 2.
            PUSH %ret2                  ; pc=12
            PUSH %fn_entry              ; pc=14
            JUMP                        ; pc=16
        ret2:
            JUMPDEST                    ; pc=17
            POP                         ; pc=18
            STOP                        ; pc=19
            ; Opaque third caller: loads return addr from memory (reachable via JUMPI).
        opaque_caller:
            JUMPDEST                    ; pc=20
            PUSH0                       ; pc=21
            MLOAD                       ; pc=22: any_ret
            PUSH %fn_entry              ; pc=23
            JUMP                        ; pc=25: -> fn_entry with arbitrary return addr
            ; Internal function.
        fn_entry:
            JUMPDEST                    ; pc=26
            PUSH1 0x42                  ; pc=27
            SWAP1                       ; pc=29
            JUMP                        ; pc=30: return
        ",
        );

        // The return JUMP at pc=30 must NOT be resolved — the opaque caller
        // at pc=25 reaches fn_entry with an arbitrary return address.
        let return_jump = bytecode.iter_insts().find(|(i, d)| bytecode.pc(*i) == 30 && d.is_jump());
        let (_, rj) = return_jump.unwrap();
        assert!(
            !rj.flags.contains(InstFlags::STATIC_JUMP),
            "unsound: return jump should not be resolved with opaque caller"
        );
    }

    /// Loop counter (inline increment) must be Top after fixpoint.
    #[test]
    fn const_snapshot_loop_counter_inline() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x00          ; inst 0: i = 0
        loop:
            JUMPDEST            ; inst 1
            DUP1                ; inst 2
            PUSH1 0x0a          ; inst 3
            LT                  ; inst 4
            ISZERO              ; inst 5
            PUSH %exit          ; inst 6
            JUMPI               ; inst 7
            PUSH1 0x01          ; inst 8
            ADD                 ; inst 9: i++
            PUSH %loop          ; inst 10
            JUMP                ; inst 11
        exit:
            JUMPDEST            ; inst 12
            POP                 ; inst 13
            STOP                ; inst 14
        ",
        );

        // DUP1 at inst 2: counter is loop-variant.
        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 0), None);
        // ADD at inst 9: TOS=1 (const), second=counter (Top).
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 0), Some(U256::from(1)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 1), None);
    }
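
    /// Hedged illustration, not the pass's real code: why the counter above
    /// ends up Top. Re-running the loop body's transfer (i -> i + 1) and
    /// joining with the previous entry state never stabilizes on one constant,
    /// so the entry value widens until the fixpoint settles at Top. The real
    /// pass may pass through a ConstSet first; this model joins straight to Top.
    #[test]
    fn sketch_loop_counter_widening_model() {
        #[derive(Clone, Copy, Debug, PartialEq)]
        enum Abs {
            Const(u64),
            Top,
        }

        fn join(a: Abs, b: Abs) -> Abs {
            if a == b { a } else { Abs::Top }
        }
        fn transfer(v: Abs) -> Abs {
            match v {
                Abs::Const(c) => Abs::Const(c + 1), // the ADD in the loop body
                Abs::Top => Abs::Top,
            }
        }

        // Iterate the loop header's entry value to a fixpoint.
        let mut entry = Abs::Const(0); // initial PUSH1 0x00
        loop {
            let back_edge = transfer(entry);
            let joined = join(entry, back_edge);
            if joined == entry {
                break; // fixpoint reached
            }
            entry = joined;
        }
        assert_eq!(entry, Abs::Top);
    }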

    /// Loop counter incremented via single-caller subroutine must be Top.
    #[test]
    fn const_snapshot_loop_counter_subroutine() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x00
        loop:
            JUMPDEST            ; inst 1
            DUP1                ; inst 2
            PUSH1 0x0a
            LT
            ISZERO
            PUSH %exit
            JUMPI
            ; call add_fn(counter, 1) -> counter'
            PUSH1 0x01
            DUP2
            PUSH %ret
            SWAP2
            SWAP1
            PUSH %add_fn
            JUMP
        ret:
            JUMPDEST
            SWAP1
            POP
            PUSH %loop
            JUMP
        exit:
            JUMPDEST
            POP
            STOP
        add_fn:
            JUMPDEST
            PUSH1 0x00
            DUP3
            DUP3
            ADD
            SWAP1
            POP
            DUP1
            DUP3
            GT
            ISZERO
            PUSH %add_ok
            JUMPI
            INVALID
        add_ok:
            JUMPDEST
            SWAP3
            SWAP2
            POP
            POP
            JUMP
        ",
        );

        assert_eq!(bytecode.const_operand(Inst::from_usize(2), 0), None);
    }

    /// Loop counter incremented via multi-caller subroutine must be Top.
    #[test]
    fn const_snapshot_loop_counter_multi_caller() {
        let bytecode = analyze_asm(
            "
            ; Caller 1 (outside loop) to make add_fn multi-target.
            PUSH1 0x02
            PUSH1 0x03
            PUSH %ret_outer
            SWAP2
            SWAP1
            PUSH %add_fn
            JUMP
        ret_outer:
            JUMPDEST            ; inst 7
            POP
            PUSH1 0x00          ; i = 0
        loop:
            JUMPDEST            ; inst 10
            DUP1                ; inst 11
            PUSH1 0x0a
            LT
            ISZERO
            PUSH %exit
            JUMPI
            ; Caller 2 (inside loop).
            PUSH1 0x01
            DUP2
            PUSH %ret_loop
            SWAP2
            SWAP1
            PUSH %add_fn
            JUMP
        ret_loop:
            JUMPDEST
            SWAP1
            POP
            PUSH %loop
            JUMP
        exit:
            JUMPDEST
            POP
            STOP
        add_fn:
            JUMPDEST
            PUSH1 0x00
            DUP3
            DUP3
            ADD
            SWAP1
            POP
            DUP1
            DUP3
            GT
            ISZERO
            PUSH %add_ok
            JUMPI
            INVALID
        add_ok:
            JUMPDEST
            SWAP3
            SWAP2
            POP
            POP
            JUMP
        ",
        );

        // DUP1 at inst 11: counter is loop-variant.
        assert_eq!(bytecode.const_operand(Inst::from_usize(11), 0), None);
    }

    /// loopExp statetest: loop counters through checked_add subroutine must be Top.
    #[test]
    fn const_snapshot_loop_exp() {
        // tests/GeneralStateTests/VMTests/vmPerformance/loopExp.json
        let bytecode = analyze_hex(
            "608060405234801561001057600080fd5b506004361061004c5760003560e01c806363ad973214610051578063a34f51e414610081578063dfaf4a87146100b1578063f078245b146100e1575b600080fd5b61006b60048036038101906100669190610275565b610111565b60405161007891906102d7565b60405180910390f35b61009b60048036038101906100969190610275565b610199565b6040516100a891906102d7565b60405180910390f35b6100cb60048036038101906100c69190610275565b6101cb565b6040516100d891906102d7565b60405180910390f35b6100fb60048036038101906100f69190610275565b610208565b60405161010891906102d7565b60405180910390f35b60008083905060005b838110156101865785820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915085820a915060108161017f9190610321565b905061011a565b5080600081905550809150509392505050565b6000805b828110156101b9576001816101b29190610321565b905061019d565b508260008190555082905093925050505650565b60008083905060005b838110156101f55785820a91506001816101ee9190610321565b90506101d4565b5080600081905550809150509392505050565b6000805b82811015610228576010816102219190610321565b905061020c565b50826000819055508290509392505050565b600080fd5b6000819050919050565b6102528161023f565b811461025d57600080fd5b50565b60008135905061026f81610249565b92915050565b60008060006060848603121561028e5761028d61023a565b5b600061029c86828701610260565b93505060206102ad86828701610260565b92505060406102be86828701610260565b9150509250925092565b6102d18161023f565b82525050565b60006020820190506102ec60008301846102c8565b92915050565b7f4e487b71000000000000000000000000000000000000000000000000000000006000526011600452602460006000fd5b600061032c8261023f565b91506103378361023f565b9250828201905080821115610340f5761034e6102f2565b5b9291505056fea264697066735822122000b253a2b246ddfeb0f09ddf5ec7dc70e2235369b8a2f3f9b55e32a5ca01e04ff64736f6c63430008180033",
        );

        // simpleLoop LT at pc=416: counter (depth=0) is loop-variant.
        let lt_inst = bytecode
            .insts
            .iter_enumerated()
            .find(|(i, inst)| inst.opcode == op::LT && bytecode.pc(*i) == 416)
            .map(|(i, _)| i)
            .expect("LT at pc=416 not found");
        assert_eq!(bytecode.const_operand(lt_inst, 0), None);
    }

    /// Suspect-block invalidation must preserve block-local constants while
    /// restoring cross-block values to Top.
    ///
    /// The target block receives 0xAA from a predecessor (cross-block) and also
    /// has block-local PUSHes. Without restoration, the fixpoint's precise 0xAA
    /// would persist unsoundly; with restoration, only block-local constants survive.
    #[test]
    fn suspect_block_preserves_local_const_operand() {
        let bytecode = analyze_asm(
            "
            PUSH1 0xAA          ; inst 0: inherited into target
            CALLDATASIZE        ; inst 1: unknown condition
            PUSH %target        ; inst 2
            JUMPI               ; inst 3: target reachable, fallthrough reachable

            PUSH0               ; inst 4
            CALLDATALOAD        ; inst 5: Top
            JUMP                ; inst 6: unresolved -> triggers suspect invalidation

        target:
            JUMPDEST            ; inst 7: suspect seed
            PUSH0               ; inst 8: block-local constant
            MSTORE              ; inst 9: offset=0 (local), value=inherited 0xAA
            STOP                ; inst 10
        ",
        );
        assert!(bytecode.has_dynamic_jumps);
        // Block-local constant survives.
        assert_eq!(bytecode.const_output(Inst::from_usize(8)), Some(U256::ZERO));
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 0), Some(U256::ZERO));
        // Cross-block value must be restored to Top.
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 1), None);
    }
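
    /// Hedged illustration, not the pass's real code: a local model of the
    /// snapshot restoration behind the test above (the real pass uses
    /// `Snapshots::restore_from`). Fixpoint snapshots are precise but may rest
    /// on unsound cross-block assumptions; the saved block-local snapshots see
    /// only the block's own pushes. For the suspect block's instruction range,
    /// the saved entries are copied back over the fixpoint ones.
    #[test]
    fn sketch_restore_from_model() {
        // Per-instruction "known constant output"; None models Top.
        // Insts 0-1 form a non-suspect block, insts 2-3 the suspect block.
        let fixpoint = vec![Some(1u64), Some(2), Some(0xAA), Some(0)];
        let block_local = vec![None, None, None, Some(0)];

        let mut outputs = fixpoint.clone();
        for i in 2..4 {
            // Only the suspect block's range is restored.
            outputs[i] = block_local[i];
        }
        // Non-suspect snapshots keep their fixpoint values; in the suspect
        // block, the block-local constant survives and the inherited 0xAA
        // reverts to unknown.
        assert_eq!(outputs, vec![Some(1), Some(2), None, Some(0)]);
    }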

    /// Same as above but asserting const_output: block-local PUSH output survives,
    /// but anything depending on cross-block values becomes None.
    #[test]
    fn suspect_block_preserves_local_const_output() {
        let bytecode = analyze_asm(
            "
            PUSH1 0xAA          ; inst 0: inherited into target
            CALLDATASIZE        ; inst 1
            PUSH %target        ; inst 2
            JUMPI               ; inst 3

            PUSH0               ; inst 4
            CALLDATALOAD        ; inst 5: Top
            JUMP                ; inst 6: unresolvable

        target:
            JUMPDEST            ; inst 7: suspect seed
            PUSH1 0x01          ; inst 8: block-local const output
            ADD                 ; inst 9: 0xAA + 0x01 in fixpoint, Top + 0x01 after restore
            STOP                ; inst 10
        ",
        );
        assert!(bytecode.has_dynamic_jumps);
        // Purely local push output preserved.
        assert_eq!(bytecode.const_output(Inst::from_usize(8)), Some(U256::from(0x01)));
        // ADD: local operand preserved, inherited operand restored to Top.
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 0), Some(U256::from(0x01)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 1), None);
        assert_eq!(bytecode.const_output(Inst::from_usize(9)), None);
    }

    /// Fallthrough block after a JUMPI in a suspect JUMPDEST block becomes
    /// suspect via forward propagation. Block-local constants in the fallthrough
    /// survive, but cross-block inherited values are restored to Top.
    #[test]
    fn suspect_jumpi_fallthrough_preserves_consts() {
        let bytecode = analyze_asm(
            "
            PUSH1 0xAA          ; inst 0: inherited into target
            CALLDATASIZE        ; inst 1: unknown condition
            PUSH %target        ; inst 2
            JUMPI               ; inst 3

            PUSH0               ; inst 4
            CALLDATALOAD        ; inst 5: Top
            JUMP                ; inst 6: unresolved

        target:
            JUMPDEST            ; inst 7: suspect seed, entry stack [0xAA]
            PUSH1 0x01          ; inst 8: condition (always taken)
            PUSH %after         ; inst 9
            JUMPI               ; inst 10: leaves [0xAA] for after
            STOP                ; inst 11

        after:
            JUMPDEST            ; inst 12: suspect via forward propagation
            PUSH0               ; inst 13: block-local constant
            MSTORE              ; inst 14: offset=0 (local), value=inherited 0xAA
            STOP                ; inst 15
        ",
        );
        assert!(bytecode.has_dynamic_jumps);
        // Block-local constant in propagated-suspect fallthrough survives.
        assert_eq!(bytecode.const_output(Inst::from_usize(13)), Some(U256::ZERO));
        assert_eq!(bytecode.const_operand(Inst::from_usize(14), 0), Some(U256::ZERO));
        // Cross-block inherited value restored to Top.
        assert_eq!(bytecode.const_operand(Inst::from_usize(14), 1), None);
    }

    /// Non-suspect blocks must retain their fixpoint-derived snapshots even when
    /// invalidation runs elsewhere. The fallthrough after a JUMPI is NOT a
    /// JUMPDEST, so it is never suspect. Block-local constants, const-folded
    /// results, and cross-block values resolved via the single-predecessor path
    /// (MLOAD after MSTORE) must all be preserved.
    #[test]
    fn nonsuspect_fallthrough_keeps_fixpoint_snapshots() {
        let bytecode = analyze_asm(
            "
            PUSH1 0x69          ; inst 0: cross-block value
            CALLDATASIZE        ; inst 1: unknown condition
            PUSH %target        ; inst 2
            JUMPI               ; inst 3
            ; Non-suspect fallthrough block (no JUMPDEST).
            PUSH1 0x0A          ; inst 4
            PUSH1 0x0B          ; inst 5
            ADD                 ; inst 6: 0x0A + 0x0B = 0x15, block-local
            PUSH0               ; inst 7
            MSTORE              ; inst 8
            MLOAD               ; inst 9: cross-block via single predecessor
            PUSH1 0x01          ; inst 10
            MSTORE              ; inst 11
            STOP                ; inst 12
            ; Opaque dynamic jump — triggers suspect invalidation.
            JUMPDEST            ; inst 13
            PUSH0               ; inst 14
            CALLDATALOAD        ; inst 15: Top
            JUMP                ; inst 16: unresolvable
        target:
            JUMPDEST            ; inst 17: suspect seed
            PUSH1 0x42          ; inst 18
            STOP                ; inst 19
        ",
        );
        assert!(bytecode.has_dynamic_jumps);
        // Fallthrough block: block-local constants survive.
        // ADD
        assert_eq!(bytecode.const_operand(Inst::from_usize(6), 0), Some(U256::from(0x0B)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(6), 1), Some(U256::from(0x0A)));
        assert_eq!(bytecode.const_output(Inst::from_usize(6)), Some(U256::from(0x15)));
        // MSTORE 1
        assert_eq!(bytecode.const_operand(Inst::from_usize(8), 0), Some(U256::from(0)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(8), 1), Some(U256::from(0x15)));
        // MLOAD: single predecessor (no JUMPDEST), so cross-block value resolves.
        assert_eq!(bytecode.const_operand(Inst::from_usize(9), 0), Some(U256::from(0x69)));
        // Target block: block-local PUSH survives.
        assert_eq!(bytecode.const_output(Inst::from_usize(18)), Some(U256::from(0x42)));
    }

    /// JUMPI in a suspect JUMPDEST block: the fallthrough block inherits
    /// suspect status via forward propagation. Incoming stack values from the
    /// predecessor become Top (block-local pass doesn't see cross-block flow),
    /// while block-local PUSHes in the fallthrough are preserved.
    #[test]
    fn suspect_jumpi_fallthrough_mixed_operands() {
        let bytecode = analyze_asm(
            "
            ; Jump into the suspect JUMPDEST block with a value on the stack.
            PUSH1 0xAA          ; inst 0: will be on stack at entry to target
            PUSH %target        ; inst 1
            JUMP                ; inst 2: static jump
            ; Opaque dynamic jump — triggers suspect invalidation.
            JUMPDEST            ; inst 3
            PUSH0               ; inst 4
            CALLDATALOAD        ; inst 5: Top
            JUMP                ; inst 6: unresolvable
        target:
            ; Suspect JUMPDEST block with JUMPI.
            ; Entry stack: [0xAA] (from pred, but unknown to block-local pass).
            JUMPDEST            ; inst 7: suspect seed
            PUSH1 0x01          ; inst 8: condition
            PUSH %after         ; inst 9
            JUMPI               ; inst 10: pops condition+target, [0xAA] remains
            STOP                ; inst 11
        after:
            ; Fallthrough/target of JUMPI — suspect via propagation.
            ; Entry stack: [0xAA] (inherited, but Top in block-local pass).
            JUMPDEST            ; inst 12
            PUSH1 0xBB          ; inst 13: block-local
            ADD                 ; inst 14: 0xAA + 0xBB — depth 1 is incoming
            STOP                ; inst 15
        ",
        );
        assert!(bytecode.has_dynamic_jumps);
        // ADD at inst 14: TOS (depth 0) = 0xBB (block-local), depth 1 = Top
        // (incoming from suspect predecessor, block-local pass saw it as Top).
        assert_eq!(bytecode.const_operand(Inst::from_usize(14), 0), Some(U256::from(0xBB)));
        assert_eq!(bytecode.const_operand(Inst::from_usize(14), 1), None);
        assert_eq!(bytecode.const_output(Inst::from_usize(14)), None);
    }

    /// Bytecode that is just STOP — minimal edge case.
    #[test]
    fn stop_only_bytecode() {
        let bytecode = analyze_asm("STOP");
        assert!(!bytecode.has_dynamic_jumps);
    }

    /// A dynamic jump in dead code should not cause `has_dynamic_jumps` to be true.
    #[test]
    fn dead_code_dynamic_jump() {
        let bytecode = analyze_asm(
            "
            STOP
            PUSH0
            CALLDATALOAD
            JUMP
        ",
        );
        assert!(!bytecode.has_dynamic_jumps);
    }

    /// Many callers to a shared internal function through relay blocks.
    /// When the fixpoint iteration cap is exceeded, partially-discovered
    /// multi-target jumps must NOT be committed as closed MULTI_JUMPs.
    #[test]
    fn non_converged_multi_jump_stays_dynamic() {
        // Generate bytecode with k call sites, b relay blocks, and one shared function.
        // Each call site pushes a different return address and jumps through the relay
        // chain into the shared function, which does PUSH1 0x42; SWAP1; JUMP.
        // With enough callers and relays, the fixpoint cap is exceeded before all
        // return addresses are discovered.
        let k = 15;
        let b = 31;
        let mut lines = Vec::new();
        lines.push("PUSH %call0".to_string());
        lines.push("JUMP".to_string());
        for i in 0..k {
            lines.push(format!("call{i}:"));
            lines.push("JUMPDEST".to_string());
            lines.push(format!("PUSH %ret{i}"));
            lines.push("PUSH %relay0".to_string());
            lines.push("JUMP".to_string());

            lines.push(format!("ret{i}:"));
            lines.push("JUMPDEST".to_string());
            lines.push("POP".to_string());
            if i + 1 < k {
                lines.push(format!("PUSH %call{}", i + 1));
                lines.push("JUMP".to_string());
            } else {
                lines.push("STOP".to_string());
            }
        }
        for i in 0..b {
            lines.push(format!("relay{i}:"));
            lines.push("JUMPDEST".to_string());
            if i + 1 < b {
                lines.push(format!("PUSH %relay{}", i + 1));
            } else {
                lines.push("PUSH %fn_entry".to_string());
            }
            lines.push("JUMP".to_string());
        }
        lines.push("fn_entry:".to_string());
        lines.push("JUMPDEST".to_string());
        lines.push("PUSH1 0x42".to_string());
        lines.push("SWAP1".to_string());
        lines.push("JUMP".to_string());

        let bytecode = analyze_asm(&lines.join("\n"));

        // The shared function's return JUMP must NOT be committed as a closed
        // MULTI_JUMP when the fixpoint didn't converge — doing so would produce
        // an incomplete switch table missing some valid return addresses.
        let return_jump = bytecode
            .iter_insts()
            .find(|(_, d)| d.is_jump() && d.flags.contains(InstFlags::MULTI_JUMP));
        assert!(return_jump.is_none(), "non-converged fixpoint must not commit partial MULTI_JUMP");
        // It should remain dynamic instead.
        assert!(
            bytecode.has_dynamic_jumps,
            "return jump should remain dynamic when fixpoint doesn't converge"
        );
    }
}