// revmc_context/lib.rs — runtime context shared between the interpreter and revmc-compiled EVM code.
1#![doc = include_str!("../README.md")]
2#![cfg_attr(not(test), warn(unused_extern_crates))]
3#![cfg_attr(docsrs, feature(doc_cfg))]
4#![cfg_attr(not(feature = "std"), no_std)]
5
6extern crate alloc;
7
8use alloc::vec::Vec;
9use core::{
10    fmt,
11    mem::MaybeUninit,
12    ptr::{self, NonNull},
13};
14use revm_interpreter::{
15    Gas, Host, InputsImpl, InstructionResult, Interpreter, InterpreterAction, InterpreterResult,
16    SharedMemory,
17    context_interface::cfg::GasParams,
18    interpreter_types::{Jumps, LegacyBytecode, ReturnData, RuntimeFlag},
19};
20use revm_primitives::{Address, B256, Bytes, Log, U256, hardfork::SpecId, ruint};
21
// Architecture-specific entry/exit trampolines used to call into compiled code.
mod arch;
use arch::revmc_entry;
pub use arch::revmc_exit;

// Optional JIT EVM wrapper, gated behind the `evm` feature.
#[cfg(feature = "evm")]
mod jit_evm;
#[cfg(feature = "evm")]
pub use jit_evm::JitEvm;
30
/// Resume point for compiled EVM code after a CALL/CREATE suspension.
///
/// Encoded as the interpreter's bytecode PC. `0` means no resume (initial state),
/// values `1..=N` identify individual suspend points.
///
/// Since the number of suspend points cannot exceed the bytecode length, the stored
/// PC always stays within the allocation.
// `repr(transparent)` keeps this ABI-identical to a bare `usize`, so JIT-generated
// code can read/write the `EvmContext::resume_at` field directly by offset.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
#[repr(transparent)]
#[doc(hidden)] // Not public API.
pub struct ResumeAt(usize);
42
43impl ResumeAt {
44    /// Loads the resume point from the interpreter's current PC.
45    #[inline]
46    pub fn load(interpreter: &Interpreter) -> Self {
47        Self(interpreter.bytecode.pc())
48    }
49
50    /// Stores the resume point back into the interpreter's bytecode PC.
51    #[inline]
52    pub fn store(self, interpreter: &mut Interpreter) {
53        interpreter.bytecode.absolute_jump(self.0);
54    }
55
56    /// Returns the raw resume index.
57    #[inline]
58    pub const fn get(self) -> usize {
59        self.0
60    }
61}
62
63impl From<usize> for ResumeAt {
64    #[inline]
65    fn from(value: usize) -> Self {
66        Self(value)
67    }
68}
69
70impl PartialEq<usize> for ResumeAt {
71    #[inline]
72    fn eq(&self, other: &usize) -> bool {
73        self.0 == *other
74    }
75}
76
/// The EVM bytecode compiler runtime context.
///
/// This is a simple wrapper around the interpreter's resources, allowing the compiled function to
/// access the memory, input, gas, host, and other resources.
///
/// # Safety
/// This struct uses `#[repr(C)]` to ensure a stable field layout since the JIT compiler
/// generates code that accesses fields by offset using `offset_of!`.
/// Do not reorder, add, or remove fields without also updating the offset assertions
/// that follow this definition.
#[repr(C)]
pub struct EvmContext<'a> {
    /// The memory.
    pub memory: &'a mut SharedMemory,
    /// Input information (target address, caller, input data, call value).
    pub input: &'a mut InputsImpl,
    /// The gas.
    pub gas: Gas,
    /// The host.
    pub host: &'a mut dyn Host,
    /// The return action.
    pub next_action: &'a mut Option<InterpreterAction>,
    /// The return data.
    pub return_data: &'a [u8],
    /// Whether the context is static.
    pub is_static: bool,
    /// The spec ID for the current execution.
    pub spec_id: SpecId,
    /// Index that tracks where execution should resume after a CALL/CREATE suspension.
    #[doc(hidden)] // Not public API.
    pub resume_at: ResumeAt,
    /// The contract bytecode, for CODECOPY at runtime.
    ///
    /// Raw pointer derived from `interpreter.bytecode` at construction; only valid
    /// while the interpreter borrow that produced this context is alive.
    pub bytecode: *const [u8],
    /// Optional callback invoked by the LOG builtin after constructing the log,
    /// **before** it is passed to [`Host::log`].
    ///
    /// Set to `None` when no inspector is active.
    #[doc(hidden)]
    pub on_log: Option<&'a mut (dyn FnMut(&Log) + 'a)>,
    /// The size of the call input data, cached for CALLDATASIZE.
    pub calldatasize: usize,
    /// The result set by a builtin before exiting via [`revmc_exit`].
    pub exit_result: InstructionResult,
    /// Saved RSP from the entry trampoline, used by [`revmc_exit`] to unwind.
    pub exit_sp: *mut u8,
    /// Cached gas parameters from the host.
    pub gas_params: GasParams,
    /// Cached base pointer for the current memory context.
    /// Points to `memory[checkpoint..]`, i.e. the start of the current context's memory.
    /// Refreshed after any memory resize.
    pub mem_base: *mut u8,
    /// Cached length of the current memory context in bytes.
    /// Refreshed after any memory resize.
    pub mem_len: usize,
}
130
// Static assertions to ensure the struct layout matches expectations.
// These offsets are used by the JIT compiler to access fields.
// NOTE(review): the numeric values assume a 64-bit target (8-byte pointers and
// `usize`); confirm they match the layout the JIT backend was built against.
const _: () = {
    use core::mem::offset_of;

    // Key fields accessed by JIT code
    assert!(offset_of!(EvmContext<'_>, memory) == 0);
    assert!(offset_of!(EvmContext<'_>, gas) == 16);
    assert!(offset_of!(EvmContext<'_>, spec_id) == 113);
    assert!(offset_of!(EvmContext<'_>, resume_at) == 120);
    assert!(offset_of!(EvmContext<'_>, calldatasize) == 160);
};
143
144impl fmt::Debug for EvmContext<'_> {
145    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
146        f.debug_struct("EvmContext").field("memory", &self.memory).finish_non_exhaustive()
147    }
148}
149
impl<'a> EvmContext<'a> {
    /// Creates a new context from an interpreter.
    #[inline]
    pub fn from_interpreter(interpreter: &'a mut Interpreter, host: &'a mut dyn Host) -> Self {
        Self::from_interpreter_with_stack(interpreter, host).0
    }

    /// Creates a new context from an interpreter, also returning raw views of the
    /// interpreter's stack buffer and stack length for the compiled function.
    #[inline]
    pub fn from_interpreter_with_stack<'b: 'a>(
        interpreter: &'a mut Interpreter,
        host: &'b mut dyn Host,
    ) -> (Self, &'a mut EvmStack, &'a mut usize) {
        // Capture the resume point first, before taking the mutable borrows below.
        let resume_at = ResumeAt::load(interpreter);
        let (stack, stack_len) = EvmStack::from_interpreter_stack(&mut interpreter.stack);
        let bytecode = interpreter.bytecode.bytecode_slice() as *const [u8];
        // Cached so CALLDATASIZE does not need to reach through `input` at runtime.
        let calldatasize = interpreter.input.input.len();
        let gas_params = host.gas_params().clone();
        let mut this = Self {
            memory: &mut interpreter.memory,
            input: &mut interpreter.input,
            gas: interpreter.gas,
            host,
            next_action: &mut interpreter.bytecode.action,
            return_data: interpreter.return_data.buffer(),
            is_static: interpreter.runtime_flag.is_static(),
            spec_id: interpreter.runtime_flag.spec_id(),
            resume_at,
            bytecode,
            // No inspector callback by default; callers may install one afterwards.
            on_log: None,
            calldatasize,
            exit_result: InstructionResult::Stop,
            exit_sp: ptr::null_mut(),
            gas_params,
            // Placeholder values; filled in by `refresh_memory_cache` below.
            mem_base: ptr::null_mut(),
            mem_len: 0,
        };
        this.refresh_memory_cache();
        (this, stack, stack_len)
    }

    /// Refreshes the cached memory base pointer and length from `SharedMemory`.
    ///
    /// Must be called after any operation that may resize memory.
    #[inline]
    pub fn refresh_memory_cache(&mut self) {
        let mut slice = self.memory.context_memory_mut();
        self.mem_base = slice.as_mut_ptr();
        self.mem_len = slice.len();
    }
}
201
/// Declare [`RawEvmCompilerFn`] functions in an `extern "C"` block.
///
/// # Examples
///
/// ```no_run
/// use revmc_context::{EvmCompilerFn, extern_revmc};
///
/// extern_revmc! {
///    /// A simple function.
///    pub fn test_fn;
/// }
///
/// let test_fn = EvmCompilerFn::new(test_fn);
/// ```
#[macro_export]
macro_rules! extern_revmc {
    ($( $(#[$attr:meta])* $vis:vis fn $name:ident; )+) => {
        // The lint flags non-FFI-safe types in the signature; the compiled
        // functions are declared with this exact signature (kept in sync with
        // `RawEvmCompilerFn`), so it is suppressed here.
        #[allow(improper_ctypes)]
        unsafe extern "C" {
            $(
                $(#[$attr])*
                $vis fn $name(
                    ecx: ::core::ptr::NonNull<$crate::EvmContext<'_>>,
                    stack: ::core::ptr::NonNull<$crate::EvmStack>,
                    stack_len: ::core::ptr::NonNull<usize>,
                ) -> $crate::private::revm_interpreter::InstructionResult;
            )+
        }
    };
}
232
/// The raw function signature of a bytecode function.
///
/// Prefer using [`EvmCompilerFn`] instead of this type. See [`EvmCompilerFn::call`] for more
/// information.
// When changing the signature, also update the corresponding declarations in `fn translate`
// and the `extern_revmc!` macro above, which must stay in sync with this alias.
pub type RawEvmCompilerFn = unsafe extern "C" fn(
    ecx: NonNull<EvmContext<'_>>,
    stack: NonNull<EvmStack>,
    stack_len: NonNull<usize>,
) -> InstructionResult;
243
/// An EVM bytecode function.
// NOTE: `Hash` is derived but `PartialEq`/`Eq` are not; to compare function
// identity use `core::ptr::fn_addr_eq` on the raw pointers (as the tests do).
#[derive(Clone, Copy, Debug, Hash)]
pub struct EvmCompilerFn(RawEvmCompilerFn);
247
248impl From<RawEvmCompilerFn> for EvmCompilerFn {
249    #[inline]
250    fn from(f: RawEvmCompilerFn) -> Self {
251        Self::new(f)
252    }
253}
254
255impl From<EvmCompilerFn> for RawEvmCompilerFn {
256    #[inline]
257    fn from(f: EvmCompilerFn) -> Self {
258        f.into_inner()
259    }
260}
261
impl EvmCompilerFn {
    /// Wraps the function.
    #[inline]
    pub const fn new(f: RawEvmCompilerFn) -> Self {
        Self(f)
    }

    /// Unwraps the function.
    #[inline]
    pub const fn into_inner(self) -> RawEvmCompilerFn {
        self.0
    }

    /// Calls the function by re-using the interpreter's resources.
    ///
    /// This behaves similarly to `Interpreter::run_plain`, returning an [`InstructionResult`]
    /// and the next action in an [`InterpreterAction`].
    ///
    /// # Safety
    ///
    /// The caller must ensure that the function is safe to call.
    pub unsafe fn call_with_interpreter(
        self,
        interpreter: &mut Interpreter,
        host: &mut dyn Host,
    ) -> InterpreterAction {
        // No-op `configure`: run without installing any callbacks.
        self.call_with_interpreter_inner(interpreter, host, |_| {})
    }

    /// Like [`call_with_interpreter`](Self::call_with_interpreter), but calls `configure` on the
    /// [`EvmContext`] before invoking the compiled function.
    ///
    /// This can be used to install callbacks (e.g. [`EvmContext::on_log`]) that fire during
    /// execution.
    ///
    /// # Safety
    ///
    /// Same requirements as [`call_with_interpreter`](Self::call_with_interpreter).
    #[doc(hidden)]
    pub unsafe fn call_with_interpreter_with(
        self,
        interpreter: &mut Interpreter,
        host: &mut dyn Host,
        configure: impl FnOnce(&mut EvmContext<'_>),
    ) -> InterpreterAction {
        self.call_with_interpreter_inner(interpreter, host, configure)
    }

    // Shared implementation of the two `call_with_interpreter*` entry points.
    unsafe fn call_with_interpreter_inner(
        self,
        interpreter: &mut Interpreter,
        host: &mut dyn Host,
        configure: impl FnOnce(&mut EvmContext<'_>),
    ) -> InterpreterAction {
        // Clear any stale action from a previous run before executing.
        interpreter.bytecode.action = None;

        let (mut ecx, stack, stack_len) =
            EvmContext::from_interpreter_with_stack(interpreter, host);
        configure(&mut ecx);
        let result = self.call(stack, stack_len, &mut ecx);

        // Copy out of `ecx` before its borrows of `interpreter` are released below.
        let resume_at = ecx.resume_at;

        // Set the remaining gas to 0 if the result is `OutOfGas`,
        // as it might have overflown inside of the function.
        if result == InstructionResult::OutOfGas {
            ecx.gas.spend_all();
        }

        let return_data_is_empty = ecx.return_data.is_empty();
        // Write the (possibly updated) gas state back into the interpreter.
        interpreter.gas = ecx.gas;

        if return_data_is_empty {
            interpreter.return_data.0.clear();
        }

        // Persist the resume point into the interpreter's PC so a subsequent call
        // can continue where execution suspended.
        resume_at.store(interpreter);

        if let Some(action) = interpreter.bytecode.action.take() {
            action
        } else {
            // No explicit action was recorded: report `result` as a plain return
            // with empty output.
            InterpreterAction::Return(InterpreterResult {
                result,
                output: Bytes::new(),
                gas: interpreter.gas,
            })
        }
    }

    /// Calls the function.
    ///
    /// Arguments:
    /// - `stack`: The stack buffer.
    /// - `stack_len`: The stack length.
    /// - `ecx`: The context object.
    ///
    /// Use of this method is discouraged, as setup and cleanup need to be done manually.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the arguments are valid and that the function is safe to call.
    #[inline]
    pub unsafe fn call(
        self,
        stack: &mut EvmStack,
        stack_len: &mut usize,
        ecx: &mut EvmContext<'_>,
    ) -> InstructionResult {
        // Dispatch through the arch-specific entry trampoline (which saves the
        // stack pointer into `ecx.exit_sp` for `revmc_exit` to unwind to).
        revmc_entry(NonNull::from(ecx), NonNull::from(stack), NonNull::from(stack_len), self.0)
    }

    /// Same as [`call`](Self::call) but with `#[inline(never)]`.
    ///
    /// Use of this method is discouraged, as setup and cleanup need to be done manually.
    ///
    /// # Safety
    ///
    /// See [`call`](Self::call).
    #[inline(never)]
    pub unsafe fn call_noinline(
        self,
        stack: &mut EvmStack,
        stack_len: &mut usize,
        ecx: &mut EvmContext<'_>,
    ) -> InstructionResult {
        self.call(stack, stack_len, ecx)
    }
}
390
/// EVM context stack.
///
/// A fixed array of 1024 [`EvmWord`] slots. Slots are `MaybeUninit` because only
/// the first `len` entries (the length is tracked externally) are ever initialized.
#[repr(C)]
#[allow(missing_debug_implementations)]
pub struct EvmStack([MaybeUninit<EvmWord>; 1024]);
395
396#[allow(clippy::new_without_default)]
397impl EvmStack {
398    /// The size of the stack in bytes.
399    pub const SIZE: usize = 32 * Self::CAPACITY;
400
401    /// The size of the stack in U256 elements.
402    pub const CAPACITY: usize = 1024;
403
404    /// Creates a new EVM stack, allocated on the stack.
405    ///
406    /// Use [`EvmStack::new_heap`] to create a stack on the heap.
407    #[inline]
408    pub fn new() -> Self {
409        Self(unsafe { MaybeUninit::uninit().assume_init() })
410    }
411
412    /// Creates a vector that can be used as a stack.
413    #[inline]
414    pub fn new_heap() -> Vec<EvmWord> {
415        Vec::with_capacity(1024)
416    }
417
418    /// Creates a stack from the interpreter's stack. Assumes that the stack is large enough.
419    #[inline]
420    pub fn from_interpreter_stack(stack: &mut revm_interpreter::Stack) -> (&mut Self, &mut usize) {
421        debug_assert!(stack.data().capacity() >= Self::CAPACITY);
422        let expected_len = stack.len();
423        unsafe {
424            let data = Self::from_mut_ptr(stack.data_mut().as_mut_ptr().cast());
425            // Vec { data: ptr, cap: usize, len: usize }
426            let len = &mut *(stack.data_mut() as *mut Vec<_>).cast::<usize>().add(2);
427            debug_assert_eq!(expected_len, *len);
428            (data, len)
429        }
430    }
431
432    /// Creates a stack from a vector's buffer.
433    ///
434    /// # Panics
435    ///
436    /// Panics if the vector's capacity is less than the required stack capacity.
437    #[inline]
438    pub fn from_vec(vec: &Vec<EvmWord>) -> &Self {
439        assert!(vec.capacity() >= Self::CAPACITY);
440        unsafe { Self::from_ptr(vec.as_ptr()) }
441    }
442
443    /// Creates a stack from a mutable vector's buffer.
444    ///
445    /// # Panics
446    ///
447    /// Panics if the vector's capacity is less than the required stack capacity.
448    #[inline]
449    pub fn from_mut_vec(vec: &mut Vec<EvmWord>) -> &mut Self {
450        assert!(vec.capacity() >= Self::CAPACITY);
451        unsafe { Self::from_mut_ptr(vec.as_mut_ptr()) }
452    }
453
454    /// Creates a stack from a pointer to a buffer.
455    ///
456    /// # Safety
457    ///
458    /// See [`from_vec`](Self::from_vec).
459    #[inline]
460    pub unsafe fn from_ptr<'a>(ptr: *const EvmWord) -> &'a Self {
461        debug_assert!(ptr.is_aligned());
462        unsafe { &*ptr.cast::<Self>() }
463    }
464
465    /// Creates a stack from a mutable pointer to a buffer.
466    ///
467    /// # Safety
468    ///
469    /// See [`from_mut_vec`](Self::from_mut_vec).
470    #[inline]
471    pub unsafe fn from_mut_ptr<'a>(ptr: *mut EvmWord) -> &'a mut Self {
472        debug_assert!(ptr.is_aligned());
473        unsafe { &mut *ptr.cast::<Self>() }
474    }
475
476    /// Returns a pointer to the stack.
477    #[inline]
478    pub const fn as_ptr(&self) -> *const EvmWord {
479        self.0.as_ptr().cast()
480    }
481
482    /// Returns a mutable pointer to the stack.
483    #[inline]
484    pub fn as_mut_ptr(&mut self) -> *mut EvmWord {
485        self.0.as_mut_ptr().cast()
486    }
487
488    /// Returns a slice of the initialized portion of the stack.
489    ///
490    /// # Safety
491    ///
492    /// The caller must ensure that the first `len` slots are initialized.
493    #[inline]
494    pub unsafe fn as_slice(&self, len: usize) -> &[EvmWord] {
495        assert!(len <= Self::CAPACITY);
496        unsafe { core::slice::from_raw_parts(self.as_ptr(), len) }
497    }
498
499    /// Returns a mutable slice of the initialized portion of the stack.
500    ///
501    /// # Safety
502    ///
503    /// The caller must ensure that the first `len` slots are initialized.
504    #[inline]
505    pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [EvmWord] {
506        assert!(len <= Self::CAPACITY);
507        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
508    }
509
510    /// Sets the value at the given index.
511    ///
512    /// # Panics
513    ///
514    /// Panics if the index is out of bounds.
515    #[inline]
516    pub fn set(&mut self, index: usize, value: EvmWord) {
517        self.0[index] = MaybeUninit::new(value);
518    }
519
520    /// Returns the word at the given index as a reference.
521    ///
522    /// # Safety
523    ///
524    /// The caller must ensure that the slot at `index` is initialized.
525    #[inline]
526    pub unsafe fn get(&self, index: usize) -> Option<&EvmWord> {
527        self.0.get(index).map(|slot| unsafe { slot.assume_init_ref() })
528    }
529
530    /// Returns the word at the given index as a mutable reference.
531    ///
532    /// # Safety
533    ///
534    /// The caller must ensure that the slot at `index` is initialized.
535    #[inline]
536    pub unsafe fn get_mut(&mut self, index: usize) -> Option<&mut EvmWord> {
537        self.0.get_mut(index).map(|slot| unsafe { slot.assume_init_mut() })
538    }
539
540    /// Returns the word at the given index as a reference.
541    ///
542    /// # Safety
543    ///
544    /// The caller must ensure that the index is within bounds.
545    #[inline]
546    pub unsafe fn get_unchecked(&self, index: usize) -> &EvmWord {
547        self.0.get_unchecked(index).assume_init_ref()
548    }
549
550    /// Returns the word at the given index as a mutable reference.
551    ///
552    /// # Safety
553    ///
554    /// The caller must ensure that the index is within bounds.
555    #[inline]
556    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut EvmWord {
557        self.0.get_unchecked_mut(index).assume_init_mut()
558    }
559
560    /// Sets the value at the top of the stack to `value`, and grows the stack by 1.
561    ///
562    /// # Safety
563    ///
564    /// The caller must ensure that the stack is not full.
565    #[inline]
566    pub unsafe fn push(&mut self, value: EvmWord, len: &mut usize) {
567        self.set_unchecked(*len, value);
568        *len += 1;
569    }
570
571    /// Returns the value at the top of the stack.
572    ///
573    /// # Safety
574    ///
575    /// The caller must ensure that the stack is not empty.
576    #[inline]
577    pub unsafe fn top_unchecked(&self, len: usize) -> &EvmWord {
578        self.get_unchecked(len - 1)
579    }
580
581    /// Returns the value at the top of the stack as a mutable reference.
582    ///
583    /// # Safety
584    ///
585    /// The caller must ensure that the stack is not empty.
586    #[inline]
587    pub unsafe fn top_unchecked_mut(&mut self, len: usize) -> &mut EvmWord {
588        self.get_unchecked_mut(len - 1)
589    }
590
591    /// Returns the value at the given index from the top of the stack.
592    ///
593    /// # Safety
594    ///
595    /// The caller must ensure that `len >= n + 1`.
596    #[inline]
597    pub unsafe fn from_top_unchecked(&self, len: usize, n: usize) -> &EvmWord {
598        self.get_unchecked(len - n - 1)
599    }
600
601    /// Returns the value at the given index from the top of the stack as a mutable reference.
602    ///
603    /// # Safety
604    ///
605    /// The caller must ensure that `len >= n + 1`.
606    #[inline]
607    pub unsafe fn from_top_unchecked_mut(&mut self, len: usize, n: usize) -> &mut EvmWord {
608        self.get_unchecked_mut(len - n - 1)
609    }
610
611    /// Sets the value at the given index.
612    ///
613    /// # Safety
614    ///
615    /// The caller must ensure that the index is within bounds.
616    #[inline]
617    pub unsafe fn set_unchecked(&mut self, index: usize, value: EvmWord) {
618        *self.0.get_unchecked_mut(index) = MaybeUninit::new(value);
619    }
620}
621
/// An EVM stack word, which is stored in native-endian order.
// `repr(C, align(8))` makes the layout compatible with `U256` (4 × u64 limbs)
// so the little-endian conversions in `impl EvmWord` can transmute/cast freely.
#[repr(C, align(8))]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct EvmWord(B256);
626
627impl Default for EvmWord {
628    #[inline]
629    fn default() -> Self {
630        Self::ZERO
631    }
632}
633
634impl fmt::Debug for EvmWord {
635    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
636        self.to_u256().fmt(f)
637    }
638}
639
640impl fmt::Display for EvmWord {
641    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
642        self.to_u256().fmt(f)
643    }
644}
645
646impl TryFrom<EvmWord> for usize {
647    type Error = ruint::FromUintError<Self>;
648
649    #[inline]
650    fn try_from(w: EvmWord) -> Result<Self, Self::Error> {
651        Self::try_from(&w)
652    }
653}
654
655impl TryFrom<&EvmWord> for usize {
656    type Error = ruint::FromUintError<Self>;
657
658    #[inline]
659    fn try_from(w: &EvmWord) -> Result<Self, Self::Error> {
660        w.to_u256().try_into()
661    }
662}
663
664impl TryFrom<&mut EvmWord> for usize {
665    type Error = ruint::FromUintError<Self>;
666
667    #[inline]
668    fn try_from(w: &mut EvmWord) -> Result<Self, Self::Error> {
669        Self::try_from(&*w)
670    }
671}
672
673impl From<U256> for EvmWord {
674    #[inline]
675    fn from(u: U256) -> Self {
676        Self::from_u256(u)
677    }
678}
679
impl EvmWord {
    /// Zero.
    pub const ZERO: Self = Self(B256::ZERO);

    /// Create a new word from big-endian bytes.
    #[inline]
    pub const fn from_be_bytes(bytes: B256) -> Self {
        Self::from_be(Self(bytes))
    }

    /// Create a new word from big-endian bytes.
    #[inline]
    pub const fn from_be_slice(bytes: &[u8]) -> Self {
        Self::from_u256(U256::from_be_slice(bytes))
    }

    /// Create a new word from little-endian bytes.
    #[inline]
    pub const fn from_le_bytes(bytes: B256) -> Self {
        Self::from_le(Self(bytes))
    }

    /// Create a new word from little-endian slice.
    #[inline]
    pub const fn from_le_slice(bytes: &[u8]) -> Self {
        Self::from_u256(U256::from_le_slice(bytes))
    }

    /// Create a new word from native-endian bytes.
    #[inline]
    pub const fn from_ne_bytes(bytes: B256) -> Self {
        Self(bytes)
    }

    /// Create a new word from a [`U256`]. This is a no-op on little-endian systems.
    #[inline]
    pub const fn from_u256(u: U256) -> Self {
        // SAFETY: `U256` and `Self` are both 32 bytes (`Self` is
        // `repr(C, align(8))` over `B256`), and on little-endian targets the
        // limb bytes are already in native order.
        #[cfg(target_endian = "little")]
        return unsafe { core::mem::transmute::<U256, Self>(u) };
        #[cfg(target_endian = "big")]
        return Self(B256::new(u.to_be_bytes()));
    }

    /// Converts a big-endian representation into a native one.
    #[inline]
    pub const fn from_be(x: Self) -> Self {
        #[cfg(target_endian = "little")]
        return x.swap_bytes();
        #[cfg(target_endian = "big")]
        return x;
    }

    /// Converts a little-endian representation into a native one.
    #[inline]
    pub const fn from_le(x: Self) -> Self {
        #[cfg(target_endian = "little")]
        return x;
        #[cfg(target_endian = "big")]
        return x.swap_bytes();
    }

    /// Return the memory representation of this integer as a byte array in big-endian byte order.
    #[inline]
    pub const fn to_be_bytes(self) -> B256 {
        self.to_be().to_ne_bytes()
    }

    /// Return the memory representation of this integer as a byte array in little-endian byte
    /// order.
    #[inline]
    pub const fn to_le_bytes(self) -> B256 {
        self.to_le().to_ne_bytes()
    }

    /// Return the memory representation of this integer as a byte array in native byte order.
    #[inline]
    pub const fn to_ne_bytes(self) -> B256 {
        self.0
    }

    /// Converts `self` to big endian from the target's endianness.
    #[inline]
    pub const fn to_be(self) -> Self {
        #[cfg(target_endian = "little")]
        return self.swap_bytes();
        #[cfg(target_endian = "big")]
        return self;
    }

    /// Converts `self` to little endian from the target's endianness.
    #[inline]
    pub const fn to_le(self) -> Self {
        #[cfg(target_endian = "little")]
        return self;
        #[cfg(target_endian = "big")]
        return self.swap_bytes();
    }

    /// Reverses the byte order of the integer.
    #[inline]
    pub const fn swap_bytes(mut self) -> Self {
        // `B256` wraps a 32-byte array; reversing it flips the endianness.
        self.0.0.reverse();
        self
    }

    /// Casts this value to a [`U256`]. This is a no-op on little-endian systems.
    #[cfg(target_endian = "little")]
    #[inline]
    pub const fn as_u256(&self) -> &U256 {
        // SAFETY: same size and compatible alignment as `U256` on little-endian
        // targets; see `from_u256`.
        unsafe { &*(self as *const Self as *const U256) }
    }

    /// Casts this value to a [`U256`]. This is a no-op on little-endian systems.
    #[cfg(target_endian = "little")]
    #[inline]
    pub const fn as_u256_mut(&mut self) -> &mut U256 {
        // SAFETY: same size and compatible alignment as `U256` on little-endian
        // targets; see `from_u256`.
        unsafe { &mut *(self as *mut Self as *mut U256) }
    }

    /// Converts this value to a [`U256`]. This is a simple copy on little-endian systems.
    #[inline]
    pub const fn to_u256(&self) -> U256 {
        #[cfg(target_endian = "little")]
        return *self.as_u256();
        #[cfg(target_endian = "big")]
        return U256::from_be_bytes(self.0.0);
    }

    /// Converts this value to a [`U256`]. This is a no-op on little-endian systems.
    #[inline]
    pub const fn into_u256(self) -> U256 {
        // SAFETY: same layout as `U256` on little-endian targets; see `from_u256`.
        #[cfg(target_endian = "little")]
        return unsafe { core::mem::transmute::<Self, U256>(self) };
        #[cfg(target_endian = "big")]
        return U256::from_be_bytes(self.0.0);
    }

    /// Converts this value to an [`Address`].
    #[inline]
    pub fn to_address(self) -> Address {
        // An address is the low 20 bytes of the 32-byte big-endian word.
        Address::from_word(self.to_be_bytes())
    }
}
823
// Macro re-exports.
// Not public API.
#[doc(hidden)]
pub mod private {
    // Re-exported so `extern_revmc!` expansions can reach these crates via
    // `$crate::private::…` without requiring the caller to depend on them directly.
    pub use revm_interpreter;
    pub use revm_primitives;
}
831
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn conversions() {
        let mut word = EvmWord::ZERO;
        // All three `TryFrom` receiver forms (by value, `&`, `&mut`) must agree.
        assert_eq!(usize::try_from(word), Ok(0));
        assert_eq!(usize::try_from(&word), Ok(0));
        assert_eq!(usize::try_from(&mut word), Ok(0));
    }

    // Declare `test_fn` through the macro, bound via `link_name` to the
    // `__test_fn` symbol defined just below.
    extern_revmc! {
        #[link_name = "__test_fn"]
        fn test_fn;
    }

    #[unsafe(no_mangle)]
    extern "C" fn __test_fn(
        _ecx: NonNull<EvmContext<'_>>,
        _stack: NonNull<EvmStack>,
        _stack_len: NonNull<usize>,
    ) -> InstructionResult {
        InstructionResult::Stop
    }

    // The macro-declared `test_fn` and the real `__test_fn` must resolve to the
    // same function address thanks to the `#[link_name]` attribute.
    #[test]
    fn extern_macro() {
        let f1 = EvmCompilerFn::new(test_fn).0;
        let f2 = EvmCompilerFn::new(__test_fn).0;
        assert!(core::ptr::fn_addr_eq(f1, f2), "{f1:?} != {f2:?}");
    }
}
864}