// revmc_cranelift/lib.rs

1#![doc = include_str!("../README.md")]
2#![cfg_attr(not(test), warn(unused_extern_crates))]
3#![cfg_attr(docsrs, feature(doc_cfg))]
4
5use codegen::ir::Function;
6use cranelift::{
7    codegen::ir::{FuncRef, StackSlot},
8    prelude::*,
9};
10use cranelift_jit::{JITBuilder, JITModule};
11use cranelift_module::{DataDescription, FuncId, FuncOrDataId, Linkage, Module, ModuleError};
12use cranelift_object::{ObjectBuilder, ObjectModule};
13use pretty_clif::CommentWriter;
14use revmc_backend::{
15    eyre::eyre, Backend, BackendTypes, Builder, OptimizationLevel, Result, TailCallKind,
16    TypeMethods, U256,
17};
18use std::{
19    collections::HashMap,
20    io::Write,
21    path::Path,
22    sync::{Arc, RwLock},
23};
24
25mod pretty_clif;
26
27pub use cranelift;
28pub use cranelift_jit;
29pub use cranelift_module;
30pub use cranelift_native;
31
/// The Cranelift-based EVM bytecode compiler backend.
#[allow(missing_debug_implementations)]
#[must_use]
pub struct EvmCraneliftBackend {
    /// The function builder context, which is reused across multiple FunctionBuilder instances.
    builder_context: FunctionBuilderContext,

    /// The main Cranelift context, which holds the state for codegen. Cranelift
    /// separates this from `Module` to allow for parallel compilation, with a
    /// context per thread, though this isn't in the simple demo here.
    ctx: codegen::Context,

    /// The module, with the jit backend, which manages the JIT'd functions.
    module: ModuleWrapper,

    /// Shared symbol table used by the JIT to resolve external names at link time.
    symbols: Symbols,

    /// Optimization level applied when constructing new modules (see `set_opt_level`).
    opt_level: OptimizationLevel,
    /// Comments attached to blocks/instructions, used for pretty-printed CLIF dumps.
    comments: CommentWriter,
    /// Functions declared via `build_function` but not yet defined; drained in
    /// `optimize_module`.
    functions: Vec<FuncId>,
}
53
#[allow(clippy::new_without_default)]
impl EvmCraneliftBackend {
    /// Returns `Ok(())` if the current architecture is supported, or an error message
    /// describing why the host machine is not supported in the current configuration.
    pub fn is_supported() -> Result<(), &'static str> {
        cranelift_native::builder().map(drop)
    }

    /// Creates a new instance of the JIT compiler.
    ///
    /// # Panics
    ///
    /// Panics if the current architecture is not supported. See
    /// [`is_supported`](Self::is_supported).
    #[track_caller]
    pub fn new(aot: bool, opt_level: OptimizationLevel) -> Self {
        let symbols = Symbols::new();
        let module = ModuleWrapper::new(aot, opt_level, &symbols).unwrap();
        Self {
            builder_context: FunctionBuilderContext::new(),
            // The codegen context is created by the module so it matches the module's ISA.
            ctx: module.get().make_context(),
            module,
            symbols,
            opt_level,
            comments: CommentWriter::new(),
            functions: Vec::new(),
        }
    }

    /// Tears down the current module and installs a fresh one in its place.
    ///
    /// For JIT modules, the old module's executable memory is freed and `None` is returned.
    /// For AOT modules, the old [`ObjectModule`] is returned so the caller can emit the
    /// object file (see `write_object`).
    fn finish_module(&mut self) -> Result<Option<ObjectModule>> {
        let aot = match self.module {
            ModuleWrapper::Jit(_) => {
                // TODO: Can `free_memory` take `&mut self` pls?
                // Build the replacement first so `self.module` is never left in an invalid state.
                let new = ModuleWrapper::new_jit(self.opt_level, self.symbols.clone())?;
                let ModuleWrapper::Jit(old) = std::mem::replace(&mut self.module, new) else {
                    unreachable!()
                };
                // SAFETY: callers must ensure no code pointers obtained from `old` are still
                // in use; this is the contract of the `unsafe` free/`free_all_functions` path.
                unsafe { old.free_memory() };
                None
            }
            ModuleWrapper::Aot(_) => {
                let new = ModuleWrapper::new_aot(self.opt_level)?;
                let ModuleWrapper::Aot(old) = std::mem::replace(&mut self.module, new) else {
                    unreachable!()
                };
                Some(old)
            }
        };
        // Reset the codegen context for use with the new module.
        self.module.get().clear_context(&mut self.ctx);
        Ok(aot)
    }
}
106
// Map the backend-agnostic type aliases onto their concrete Cranelift IR entities.
impl BackendTypes for EvmCraneliftBackend {
    type Type = Type;
    type Value = Value;
    type StackSlot = StackSlot;
    type BasicBlock = Block;
    type Function = FuncRef;
}
114
115impl TypeMethods for EvmCraneliftBackend {
116    fn type_ptr(&self) -> Self::Type {
117        self.module.get().target_config().pointer_type()
118    }
119
120    fn type_ptr_sized_int(&self) -> Self::Type {
121        self.type_ptr()
122    }
123
124    fn type_int(&self, bits: u32) -> Self::Type {
125        bits.try_into().ok().and_then(Type::int).unwrap_or_else(|| unimplemented!("type: i{bits}"))
126    }
127
128    fn type_array(&self, ty: Self::Type, size: u32) -> Self::Type {
129        unimplemented!("type: [{size} x {ty}]")
130    }
131
132    fn type_bit_width(&self, ty: Self::Type) -> u32 {
133        ty.bits()
134    }
135}
136
impl Backend for EvmCraneliftBackend {
    type Builder<'a> = EvmCraneliftBuilder<'a>;
    type FuncId = FuncId;

    /// Cranelift IR dumps use the conventional `.clif` extension.
    fn ir_extension(&self) -> &'static str {
        "clif"
    }

    fn set_module_name(&mut self, name: &str) {
        // Module names are not used by this backend.
        let _ = name;
    }

    fn set_is_dumping(&mut self, yes: bool) {
        // Ask codegen to capture disassembly so `dump_disasm` has output to write.
        self.ctx.set_disasm(yes);
    }

    fn set_debug_assertions(&mut self, yes: bool) {
        // Debug assertions are not supported by this backend.
        let _ = yes;
    }

    fn opt_level(&self) -> OptimizationLevel {
        self.opt_level
    }

    fn set_opt_level(&mut self, level: OptimizationLevel) {
        // Note that this will only affect new functions after a new module is created in
        // `free_all_functions`.
        self.opt_level = level;
    }

    fn is_aot(&self) -> bool {
        self.module.is_aot()
    }

    /// A name is unique if no function or data object is declared under it in the module.
    fn function_name_is_unique(&self, name: &str) -> bool {
        self.module.get().get_name(name).is_none()
    }

    fn dump_ir(&mut self, path: &Path) -> Result<()> {
        crate::pretty_clif::write_clif_file(
            path,
            self.module.get().isa(),
            &self.ctx.func,
            &self.comments,
        );
        Ok(())
    }

    fn dump_disasm(&mut self, path: &Path) -> Result<()> {
        // NOTE(review): `compiled_code()` is `None` until the function has been compiled,
        // so the `unwrap` assumes compilation already happened — confirm callers uphold
        // this. `vcode` is only populated when `set_is_dumping(true)` was called.
        if let Some(disasm) = &self.ctx.compiled_code().unwrap().vcode {
            crate::pretty_clif::write_ir_file(path, |file| file.write_all(disasm.as_bytes()))
        }
        Ok(())
    }

    /// Declares `name` with the given signature and opens a builder positioned at a fresh
    /// entry block. The returned [`FuncId`] is queued for definition in `optimize_module`.
    fn build_function(
        &mut self,
        name: &str,
        ret: Option<Self::Type>,
        params: &[Self::Type],
        param_names: &[&str],
        linkage: revmc_backend::Linkage,
    ) -> Result<(Self::Builder<'_>, FuncId)> {
        self.ctx.func.clear();
        if let Some(ret) = ret {
            self.ctx.func.signature.returns.push(AbiParam::new(ret));
        }
        for param in params {
            self.ctx.func.signature.params.push(AbiParam::new(*param));
        }
        // Parameter names are not representable in Cranelift IR.
        let _ = param_names;
        let ptr_type = self.type_ptr();
        let id = self.module.get_mut().declare_function(
            name,
            convert_linkage(linkage),
            &self.ctx.func.signature,
        )?;
        // Remember the id so `optimize_module` can define it later.
        self.functions.push(id);
        let bcx = FunctionBuilder::new(&mut self.ctx.func, &mut self.builder_context);
        let mut builder = EvmCraneliftBuilder {
            module: &mut self.module,
            comments: &mut self.comments,
            bcx,
            ptr_type,
            symbols: self.symbols.clone(),
        };
        // Create the entry block and bind the function parameters to it.
        let entry = builder.bcx.create_block();
        builder.bcx.append_block_params_for_function_params(entry);
        builder.bcx.switch_to_block(entry);
        Ok((builder, id))
    }

    fn verify_module(&mut self) -> Result<()> {
        // No separate verification pass for this backend.
        Ok(())
    }

    /// Defines all queued functions and finalizes them in the module.
    fn optimize_module(&mut self) -> Result<()> {
        // Define the function to jit. This finishes compilation, although
        // there may be outstanding relocations to perform. Currently, jit
        // cannot finish relocations until all functions to be called are
        // defined. For this toy demo for now, we'll just finalize the
        // function below.
        for &id in &self.functions {
            self.module.get_mut().define_function(id, &mut self.ctx)?;
        }
        self.functions.clear();

        // Now that compilation is finished, we can clear out the context state.
        self.module.get().clear_context(&mut self.ctx);

        // Finalize the functions which we just defined, which resolves any outstanding relocations
        // (patching in addresses, now that they're available).
        self.module.finalize_definitions()?;

        self.comments.clear();

        Ok(())
    }

    /// Writes the AOT object file; errors in JIT mode since there is no object to emit.
    fn write_object<W: std::io::Write>(&mut self, w: W) -> Result<()> {
        let module =
            self.finish_module()?.ok_or_else(|| eyre!("cannot write object in JIT mode"))?;
        let product = module.finish();
        product.object.write_stream(w).map_err(|e| eyre!("{e}"))?;
        Ok(())
    }

    /// Returns the address of a finalized JIT function; errors in AOT mode.
    fn jit_function(&mut self, id: Self::FuncId) -> Result<usize> {
        self.module.get_finalized_function(id).map(|ptr| ptr as usize)
    }

    unsafe fn free_function(&mut self, id: Self::FuncId) -> Result<()> {
        // Per-function freeing doesn't exist (yet) in cranelift-jit; this is a no-op.
        let _ = id;
        Ok(())
    }

    unsafe fn free_all_functions(&mut self) -> Result<()> {
        // Replaces the whole module, freeing all JIT memory; see `finish_module`.
        self.finish_module().map(drop)
    }
}
278
/// The Cranelift-based EVM bytecode compiler function builder.
#[allow(missing_debug_implementations)]
pub struct EvmCraneliftBuilder<'a> {
    /// Module used to declare data/functions referenced while building.
    module: &'a mut ModuleWrapper,
    /// Comment sink shared with the backend, for pretty-printed CLIF dumps.
    comments: &'a mut CommentWriter,
    /// The underlying Cranelift function builder.
    bcx: FunctionBuilder<'a>,
    /// Cached pointer type of the target ISA.
    ptr_type: Type,
    /// Shared symbol table; `add_function` registers runtime addresses here.
    symbols: Symbols,
}
288
// Reuse the backend's type aliases so builder and backend always agree.
impl BackendTypes for EvmCraneliftBuilder<'_> {
    type Type = <EvmCraneliftBackend as BackendTypes>::Type;
    type Value = <EvmCraneliftBackend as BackendTypes>::Value;
    type StackSlot = <EvmCraneliftBackend as BackendTypes>::StackSlot;
    type BasicBlock = <EvmCraneliftBackend as BackendTypes>::BasicBlock;
    type Function = <EvmCraneliftBackend as BackendTypes>::Function;
}
296
297impl TypeMethods for EvmCraneliftBuilder<'_> {
298    fn type_ptr(&self) -> Self::Type {
299        self.ptr_type
300    }
301
302    fn type_ptr_sized_int(&self) -> Self::Type {
303        self.ptr_type
304    }
305
306    fn type_int(&self, bits: u32) -> Self::Type {
307        bits.try_into().ok().and_then(Type::int).unwrap_or_else(|| unimplemented!("type: i{bits}"))
308    }
309
310    fn type_array(&self, ty: Self::Type, size: u32) -> Self::Type {
311        unimplemented!("type: [{size} x {ty}]")
312    }
313
314    fn type_bit_width(&self, ty: Self::Type) -> u32 {
315        ty.bits()
316    }
317}
318
impl<'a> Builder for EvmCraneliftBuilder<'a> {
    /// Creates a new block; `name` is recorded as a comment for CLIF dumps only.
    fn create_block(&mut self, name: &str) -> Self::BasicBlock {
        let block = self.bcx.create_block();
        if !name.is_empty() && self.comments.enabled() {
            self.comments.add_comment(block, name);
        }
        block
    }

    fn create_block_after(&mut self, after: Self::BasicBlock, name: &str) -> Self::BasicBlock {
        let block = self.create_block(name);
        self.bcx.insert_block_after(block, after);
        block
    }

    fn switch_to_block(&mut self, block: Self::BasicBlock) {
        self.bcx.switch_to_block(block);
    }

    fn seal_block(&mut self, block: Self::BasicBlock) {
        self.bcx.seal_block(block);
    }

    fn seal_all_blocks(&mut self) {
        self.bcx.seal_all_blocks();
    }

    /// Marks the block currently being built as cold (panics if there is none).
    fn set_current_block_cold(&mut self) {
        self.bcx.set_cold_block(self.bcx.current_block().unwrap());
    }

    fn current_block(&mut self) -> Option<Self::BasicBlock> {
        self.bcx.current_block()
    }

    /// Block addresses as values are not supported by this backend.
    fn block_addr(&mut self, _block: Self::BasicBlock) -> Option<Self::Value> {
        None
    }

    /// Attaches `comment` to the most recently inserted instruction, if any.
    fn add_comment_to_current_inst(&mut self, comment: &str) {
        let Some(block) = self.bcx.current_block() else { return };
        let Some(inst) = self.bcx.func.layout.last_inst(block) else { return };
        self.comments.add_comment(inst, comment);
    }

    /// Returns the `index`-th function parameter.
    ///
    /// NOTE(review): this reads the *current* block's parameters — it presumably is only
    /// called while the entry block is current (where function params were appended);
    /// confirm callers uphold this.
    fn fn_param(&mut self, index: usize) -> Self::Value {
        let block = self.current_block().unwrap();
        self.bcx.block_params(block)[index]
    }

    fn num_fn_params(&self) -> usize {
        self.bcx.func.signature.params.len()
    }

    /// Booleans are materialized as `i8` 0/1.
    fn bool_const(&mut self, value: bool) -> Self::Value {
        self.iconst(types::I8, value as i64)
    }

    fn iconst(&mut self, ty: Self::Type, value: i64) -> Self::Value {
        self.bcx.ins().iconst(ty, value)
    }

    fn uconst(&mut self, ty: Self::Type, value: u64) -> Self::Value {
        self.iconst(ty, value as i64)
    }

    /// 256-bit constants are not yet supported: Cranelift has no i256 type.
    fn iconst_256(&mut self, value: U256) -> Self::Value {
        let _ = value;
        todo!("no i256 :(")
    }

    /// Defines the string bytes as anonymous module data and returns a pointer to it.
    fn str_const(&mut self, value: &str) -> Self::Value {
        // https://github.com/rust-lang/rustc_codegen_cranelift/blob/1122338eb88648ec36a2eb2b1c27031fa897964d/src/common.rs#L432

        let mut data = DataDescription::new();
        data.define(value.as_bytes().into());
        let msg_id = self.module.get_mut().declare_anonymous_data(false, false).unwrap();

        // Ignore DuplicateDefinition error, as the data will be the same
        let _ = self.module.get_mut().define_data(msg_id, &data);

        let local_msg_id = self.module.get().declare_data_in_func(msg_id, self.bcx.func);
        if self.comments.enabled() {
            self.comments.add_comment(local_msg_id, value);
        }
        self.bcx.ins().global_value(self.ptr_type, local_msg_id)
    }

    /// Null pointers are just the integer 0 of pointer width.
    fn nullptr(&mut self) -> Self::Value {
        self.iconst(self.ptr_type, 0)
    }

    /// Allocates a sized stack slot for `ty`; `name` is unused by this backend.
    // See https://github.com/rust-lang/rustc_codegen_cranelift/blob/1122338eb88648ec36a2eb2b1c27031fa897964d/src/common.rs#L388
    // for how rustc_codegen_cranelift handles slot size/alignment.
    fn new_stack_slot_raw(&mut self, ty: Self::Type, name: &str) -> Self::StackSlot {
        let _ = name;
        self.bcx.create_sized_stack_slot(StackSlotData {
            kind: StackSlotKind::ExplicitSlot,
            size: ty.bytes(),
            // NOTE(review): `align_shift: 1` requests only 2-byte alignment — confirm this
            // is sufficient for every type stored in these slots.
            align_shift: 1,
        })
    }

    fn stack_load(&mut self, ty: Self::Type, slot: Self::StackSlot, name: &str) -> Self::Value {
        let _ = name;
        self.bcx.ins().stack_load(ty, slot, 0)
    }

    fn stack_store(&mut self, value: Self::Value, slot: Self::StackSlot) {
        self.bcx.ins().stack_store(value, slot, 0);
    }

    fn stack_addr(&mut self, ty: Self::Type, slot: Self::StackSlot) -> Self::Value {
        self.bcx.ins().stack_addr(ty, slot, 0)
    }

    /// Aligned load: `trusted` flags assert the access is aligned and non-trapping.
    fn load(&mut self, ty: Self::Type, ptr: Self::Value, name: &str) -> Self::Value {
        let _ = name;
        self.bcx.ins().load(ty, MemFlags::trusted(), ptr, 0)
    }

    /// Unaligned load: only `notrap` is set, so no alignment is assumed.
    fn load_unaligned(&mut self, ty: Self::Type, ptr: Self::Value, name: &str) -> Self::Value {
        let _ = name;
        self.bcx.ins().load(ty, MemFlags::new().with_notrap(), ptr, 0)
    }

    /// Aligned store; see `load` for the meaning of `trusted`.
    fn store(&mut self, value: Self::Value, ptr: Self::Value) {
        self.bcx.ins().store(MemFlags::trusted(), value, ptr, 0);
    }

    fn store_unaligned(&mut self, value: Self::Value, ptr: Self::Value) {
        self.bcx.ins().store(MemFlags::new().with_notrap(), value, ptr, 0);
    }

    fn nop(&mut self) {
        self.bcx.ins().nop();
    }

    fn ret(&mut self, values: &[Self::Value]) {
        self.bcx.ins().return_(values);
    }

    fn icmp(
        &mut self,
        cond: revmc_backend::IntCC,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> Self::Value {
        self.bcx.ins().icmp(convert_intcc(cond), lhs, rhs)
    }

    fn icmp_imm(&mut self, cond: revmc_backend::IntCC, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().icmp_imm(convert_intcc(cond), lhs, rhs)
    }

    fn is_null(&mut self, ptr: Self::Value) -> Self::Value {
        self.bcx.ins().icmp_imm(IntCC::Equal, ptr, 0)
    }

    fn is_not_null(&mut self, ptr: Self::Value) -> Self::Value {
        self.bcx.ins().icmp_imm(IntCC::NotEqual, ptr, 0)
    }

    fn br(&mut self, dest: Self::BasicBlock) {
        self.bcx.ins().jump(dest, &[]);
    }

    fn brif(
        &mut self,
        cond: Self::Value,
        then_block: Self::BasicBlock,
        else_block: Self::BasicBlock,
    ) {
        self.bcx.ins().brif(cond, then_block, &[], else_block, &[]);
    }

    /// Lowers a multi-way branch via Cranelift's `Switch` helper.
    fn switch(
        &mut self,
        index: Self::Value,
        default: Self::BasicBlock,
        targets: &[(u64, Self::BasicBlock)],
        default_is_cold: bool,
    ) {
        // Cold hints for the default target are not representable here.
        let _ = default_is_cold;
        let mut switch = cranelift::frontend::Switch::new();
        for (value, block) in targets {
            switch.set_entry(*value as u128, *block);
        }
        switch.emit(&mut self.bcx, index, default)
    }

    fn br_indirect(&mut self, _address: Self::Value, _destinations: &[Self::BasicBlock]) {
        unimplemented!()
    }

    /// Emulates a phi by appending a block parameter to the current block and rewriting each
    /// incoming block's terminating jump to pass the corresponding value as a block argument.
    fn phi(&mut self, ty: Self::Type, incoming: &[(Self::Value, Self::BasicBlock)]) -> Self::Value {
        let current = self.current_block().unwrap();
        let param = self.bcx.append_block_param(current, ty);
        for &(value, block) in incoming {
            self.bcx.switch_to_block(block);
            // Replace the block's last instruction (assumed to be the jump into `current`)
            // with a jump that carries `value`.
            let last_inst = self.bcx.func.layout.last_inst(block).unwrap();
            let src = self.bcx.ins().jump(current, &[value]);
            self.bcx.func.transplant_inst(last_inst, src);
        }
        self.bcx.switch_to_block(current);
        param
    }

    fn select(
        &mut self,
        cond: Self::Value,
        then_value: Self::Value,
        else_value: Self::Value,
    ) -> Self::Value {
        self.bcx.ins().select(cond, then_value, else_value)
    }

    /// Like `select`, but each arm is only evaluated in its own branch; the result is
    /// carried as a block parameter of the join block.
    fn lazy_select(
        &mut self,
        cond: Self::Value,
        ty: Self::Type,
        then_value: impl FnOnce(&mut Self) -> Self::Value,
        else_value: impl FnOnce(&mut Self) -> Self::Value,
    ) -> Self::Value {
        let then_block = if let Some(current) = self.current_block() {
            self.create_block_after(current, "then")
        } else {
            self.create_block("then")
        };
        let else_block = self.create_block_after(then_block, "else");
        let done_block = self.create_block_after(else_block, "contd");
        let done_value = self.bcx.append_block_param(done_block, ty);

        self.brif(cond, then_block, else_block);

        self.seal_block(then_block);
        self.switch_to_block(then_block);
        let then_value = then_value(self);
        self.bcx.ins().jump(done_block, &[then_value]);

        self.seal_block(else_block);
        self.switch_to_block(else_block);
        let else_value = else_value(self);
        self.bcx.ins().jump(done_block, &[else_value]);

        self.seal_block(done_block);
        self.switch_to_block(done_block);
        done_value
    }

    fn iadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().iadd(lhs, rhs)
    }

    fn isub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().isub(lhs, rhs)
    }

    fn imul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().imul(lhs, rhs)
    }

    fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().udiv(lhs, rhs)
    }

    fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().sdiv(lhs, rhs)
    }

    fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().urem(lhs, rhs)
    }

    fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().srem(lhs, rhs)
    }

    fn iadd_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().iadd_imm(lhs, rhs)
    }

    /// There is no `isub_imm` in Cranelift; subtract by adding the negated immediate.
    fn isub_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.iadd_imm(lhs, -rhs)
    }

    fn imul_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().imul_imm(lhs, rhs)
    }

    fn uadd_overflow(&mut self, lhs: Self::Value, rhs: Self::Value) -> (Self::Value, Self::Value) {
        self.bcx.ins().uadd_overflow(lhs, rhs)
    }

    fn usub_overflow(&mut self, lhs: Self::Value, rhs: Self::Value) -> (Self::Value, Self::Value) {
        self.bcx.ins().usub_overflow(lhs, rhs)
    }

    fn uadd_sat(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().uadd_sat(lhs, rhs)
    }

    fn umax(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().umax(lhs, rhs)
    }

    fn umin(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().umin(lhs, rhs)
    }

    fn bswap(&mut self, value: Self::Value) -> Self::Value {
        self.bcx.ins().bswap(value)
    }

    fn bitor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().bor(lhs, rhs)
    }

    fn bitand(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().band(lhs, rhs)
    }

    fn bitxor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().bxor(lhs, rhs)
    }

    fn bitnot(&mut self, value: Self::Value) -> Self::Value {
        self.bcx.ins().bnot(value)
    }

    fn clz(&mut self, value: Self::Value) -> Self::Value {
        self.bcx.ins().clz(value)
    }

    fn bitor_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().bor_imm(lhs, rhs)
    }

    fn bitand_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().band_imm(lhs, rhs)
    }

    fn bitxor_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
        self.bcx.ins().bxor_imm(lhs, rhs)
    }

    fn ishl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().ishl(lhs, rhs)
    }

    fn ushr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().ushr(lhs, rhs)
    }

    fn sshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.bcx.ins().sshr(lhs, rhs)
    }

    fn zext(&mut self, ty: Self::Type, value: Self::Value) -> Self::Value {
        self.bcx.ins().uextend(ty, value)
    }

    fn sext(&mut self, ty: Self::Type, value: Self::Value) -> Self::Value {
        self.bcx.ins().sextend(ty, value)
    }

    fn ireduce(&mut self, to: Self::Type, value: Self::Value) -> Self::Value {
        self.bcx.ins().ireduce(to, value)
    }

    fn inttoptr(&mut self, value: Self::Value, _ty: Self::Type) -> Self::Value {
        // Cranelift uses the same representation for integers and pointers
        value
    }

    /// Pointer arithmetic: scales the first index by the element size and adds it to `ptr`.
    ///
    /// NOTE(review): only the first element of `indexes` is used — confirm callers never
    /// pass multi-dimensional indexes.
    fn gep(
        &mut self,
        ty: Self::Type,
        ptr: Self::Value,
        indexes: &[Self::Value],
        name: &str,
    ) -> Self::Value {
        let _ = name;
        let offset = self.bcx.ins().imul_imm(*indexes.first().unwrap(), ty.bytes() as i64);
        self.bcx.ins().iadd(ptr, offset)
    }

    /// Emits a (non-tail) call; tail-call kinds other than `None` are not implemented.
    /// Returns the first result value, if the callee returns one.
    fn tail_call(
        &mut self,
        function: Self::Function,
        args: &[Self::Value],
        tail_call: TailCallKind,
    ) -> Option<Self::Value> {
        if tail_call != TailCallKind::None {
            todo!();
        }
        let ins = self.bcx.ins().call(function, args);
        self.bcx.inst_results(ins).first().copied()
    }

    /// Constant-folding queries are not supported by this backend.
    fn is_compile_time_known(&mut self, _value: Self::Value) -> Option<Self::Value> {
        None
    }

    fn memcpy(&mut self, dst: Self::Value, src: Self::Value, len: Self::Value) {
        let config = self.module.get().target_config();
        self.bcx.call_memcpy(config, dst, src, len)
    }

    /// Unreachable code is lowered to a user trap.
    fn unreachable(&mut self) {
        self.bcx.ins().trap(TrapCode::user(0).unwrap());
    }

    /// Returns an existing function by name, or declares it and runs `build` to construct
    /// its body in a temporary function.
    ///
    /// NOTE(review): the lifetime-extending transmute below is acknowledged as unsound by
    /// the TODO; also, the locally-built `func` appears to be dropped without ever being
    /// defined in the module — confirm whether this path is actually exercised.
    fn get_or_build_function(
        &mut self,
        name: &str,
        params: &[Self::Type],
        ret: Option<Self::Type>,
        linkage: revmc_backend::Linkage,
        build: impl FnOnce(&mut Self),
    ) -> Self::Function {
        if let Some(f) = self.get_function(name) {
            return f;
        }

        let mut sig = self.module.get().make_signature();
        if let Some(ret) = ret {
            sig.returns.push(AbiParam::new(ret));
        }
        for param in params {
            sig.params.push(AbiParam::new(*param));
        }

        let id =
            self.module.get_mut().declare_function(name, convert_linkage(linkage), &sig).unwrap();

        let mut func = Function::new();
        func.signature = sig;
        let mut builder_ctx = FunctionBuilderContext::new();
        let new_bcx = FunctionBuilder::new(&mut func, &mut builder_ctx);
        // TODO: SAFETY: Not really safe, lifetime extension.
        let new_bcx =
            unsafe { std::mem::transmute::<FunctionBuilder<'_>, FunctionBuilder<'a>>(new_bcx) };
        // Temporarily swap in the new builder so `build` operates on the new function.
        let old_bcx = std::mem::replace(&mut self.bcx, new_bcx);

        let f = self.module.get_mut().declare_func_in_func(id, self.bcx.func);

        let entry = self.bcx.create_block();
        self.bcx.append_block_params_for_function_params(entry);
        build(self);

        // Restore the original builder.
        self.bcx = old_bcx;

        f
    }

    /// Looks up a declared function by name and imports it into the current function.
    fn get_function(&mut self, name: &str) -> Option<Self::Function> {
        self.module
            .get()
            .get_name(name)
            .and_then(|id| match id {
                FuncOrDataId::Func(f) => Some(f),
                FuncOrDataId::Data(_) => None,
            })
            .map(|id| self.module.get_mut().declare_func_in_func(id, self.bcx.func))
    }

    /// Returns `printf` if already declared; declaring it from scratch is unimplemented.
    fn get_printf_function(&mut self) -> Self::Function {
        if let Some(f) = self.get_function("printf") {
            return f;
        }

        unimplemented!()
    }

    /// Declares an external function; if `address` is given, it is registered in the shared
    /// symbol table so the JIT can resolve the name at link time.
    fn add_function(
        &mut self,
        name: &str,
        params: &[Self::Type],
        ret: Option<Self::Type>,
        address: Option<usize>,
        linkage: revmc_backend::Linkage,
    ) -> Self::Function {
        let mut sig = self.module.get().make_signature();
        if let Some(ret) = ret {
            sig.returns.push(AbiParam::new(ret));
        }
        for param in params {
            sig.params.push(AbiParam::new(*param));
        }
        if let Some(address) = address {
            self.symbols.insert(name.to_string(), address as *const u8);
        }
        let id =
            self.module.get_mut().declare_function(name, convert_linkage(linkage), &sig).unwrap();
        self.module.get_mut().declare_func_in_func(id, self.bcx.func)
    }

    fn add_function_attribute(
        &mut self,
        function: Option<Self::Function>,
        attribute: revmc_backend::Attribute,
        loc: revmc_backend::FunctionAttributeLocation,
    ) {
        // Function attributes are not representable in Cranelift; accepted and ignored.
        let _ = function;
        let _ = attribute;
        let _ = loc;
        // TODO
    }
}
858
/// Thread-safe, cheaply-clonable map from symbol name to runtime address.
///
/// Addresses are stored as `usize` so the map is `Send + Sync`; they are converted back to
/// `*const u8` at the API boundary.
#[derive(Clone, Debug, Default)]
struct Symbols(Arc<RwLock<HashMap<String, usize>>>);

impl Symbols {
    /// Creates an empty symbol table.
    fn new() -> Self {
        Self::default()
    }

    /// Looks up the address registered under `name`, if any.
    fn get(&self, name: &str) -> Option<*const u8> {
        let map = self.0.read().unwrap();
        map.get(name).map(|&addr| addr as *const u8)
    }

    /// Registers `ptr` under `name`, returning the previously registered address, if any.
    fn insert(&self, name: String, ptr: *const u8) -> Option<*const u8> {
        let previous = self.0.write().unwrap().insert(name, ptr as usize);
        previous.map(|addr| addr as *const u8)
    }
}
875
/// Either an in-memory JIT module or an ahead-of-time object-emitting module.
enum ModuleWrapper {
    /// In-memory compilation and execution via `cranelift-jit`.
    Jit(JITModule),
    /// Object-file emission via `cranelift-object`.
    Aot(ObjectModule),
}
880
impl ModuleWrapper {
    /// Creates either an AOT or a JIT module at the given optimization level.
    fn new(aot: bool, opt_level: OptimizationLevel, symbols: &Symbols) -> Result<Self> {
        if aot {
            Self::new_aot(opt_level)
        } else {
            Self::new_jit(opt_level, symbols.clone())
        }
    }

    /// Creates a JIT module whose unresolved symbols are looked up in `symbols`.
    fn new_jit(opt_level: OptimizationLevel, symbols: Symbols) -> Result<Self> {
        // Build a custom ISA with is_pic=false to avoid PLT which isn't supported on ARM64
        let mut flag_builder = settings::builder();
        flag_builder.set("opt_level", opt_level_flag(opt_level))?;
        flag_builder.set("is_pic", "false")?;
        let isa_builder = cranelift_native::builder().map_err(|s| eyre!(s))?;
        let isa = isa_builder.finish(settings::Flags::new(flag_builder))?;

        let mut builder = JITBuilder::with_isa(isa, cranelift_module::default_libcall_names());
        // External names (e.g. runtime callbacks registered via `add_function`) are
        // resolved through the shared symbol table at link time.
        builder.symbol_lookup_fn(Box::new(move |s| symbols.get(s)));
        Ok(Self::Jit(JITModule::new(builder)))
    }

    /// Creates an AOT module that collects compiled code into an object file.
    fn new_aot(opt_level: OptimizationLevel) -> Result<Self> {
        let mut flag_builder = settings::builder();
        flag_builder.set("opt_level", opt_level_flag(opt_level))?;
        let isa_builder = cranelift_native::builder().map_err(|s| eyre!(s))?;
        let isa = isa_builder.finish(settings::Flags::new(flag_builder))?;

        let builder =
            ObjectBuilder::new(isa, "jit".to_string(), cranelift_module::default_libcall_names())?;
        Ok(Self::Aot(ObjectModule::new(builder)))
    }

    fn is_aot(&self) -> bool {
        matches!(self, Self::Aot(_))
    }

    /// Returns the underlying module as a shared `&dyn Module`.
    #[inline]
    fn get(&self) -> &dyn Module {
        match self {
            Self::Jit(module) => module,
            Self::Aot(module) => module,
        }
    }

    /// Returns the underlying module as a mutable `&mut dyn Module`.
    #[inline]
    fn get_mut(&mut self) -> &mut dyn Module {
        match self {
            Self::Jit(module) => module,
            Self::Aot(module) => module,
        }
    }

    /// Resolves outstanding relocations in JIT mode; AOT modules have nothing to finalize.
    #[allow(clippy::result_large_err)]
    fn finalize_definitions(&mut self) -> Result<(), ModuleError> {
        match self {
            Self::Jit(module) => module.finalize_definitions(),
            Self::Aot(_) => Ok(()),
        }
    }

    /// Returns the entry pointer of a finalized JIT function; errors in AOT mode.
    fn get_finalized_function(&self, id: FuncId) -> Result<*const u8> {
        match self {
            Self::Jit(module) => Ok(module.get_finalized_function(id)),
            Self::Aot(_) => Err(eyre!("cannot get finalized JIT function in AOT mode")),
        }
    }
}
949
950fn convert_intcc(cond: revmc_backend::IntCC) -> IntCC {
951    match cond {
952        revmc_backend::IntCC::Equal => IntCC::Equal,
953        revmc_backend::IntCC::NotEqual => IntCC::NotEqual,
954        revmc_backend::IntCC::SignedLessThan => IntCC::SignedLessThan,
955        revmc_backend::IntCC::SignedGreaterThanOrEqual => IntCC::SignedGreaterThanOrEqual,
956        revmc_backend::IntCC::SignedGreaterThan => IntCC::SignedGreaterThan,
957        revmc_backend::IntCC::SignedLessThanOrEqual => IntCC::SignedLessThanOrEqual,
958        revmc_backend::IntCC::UnsignedLessThan => IntCC::UnsignedLessThan,
959        revmc_backend::IntCC::UnsignedGreaterThanOrEqual => IntCC::UnsignedGreaterThanOrEqual,
960        revmc_backend::IntCC::UnsignedGreaterThan => IntCC::UnsignedGreaterThan,
961        revmc_backend::IntCC::UnsignedLessThanOrEqual => IntCC::UnsignedLessThanOrEqual,
962    }
963}
964
965fn convert_linkage(linkage: revmc_backend::Linkage) -> Linkage {
966    match linkage {
967        revmc_backend::Linkage::Import => Linkage::Import,
968        revmc_backend::Linkage::Public => Linkage::Export,
969        revmc_backend::Linkage::Private => Linkage::Local,
970    }
971}
972
973fn opt_level_flag(opt_level: OptimizationLevel) -> &'static str {
974    match opt_level {
975        OptimizationLevel::None => "none",
976        OptimizationLevel::Less | OptimizationLevel::Default | OptimizationLevel::Aggressive => {
977            "speed"
978        }
979    }
980}