//! Core virtual machine: program loading/verification, budgeted
//! fetch-decode-execute loop, and the opcode interpreter.
use crate::call_frame::CallFrame;
|
|
use crate::verifier::Verifier;
|
|
use crate::vm_init_error::VmInitError;
|
|
use crate::{HostContext, NativeInterface};
|
|
use prometeu_bytecode::isa::core::CoreOpCode as OpCode;
|
|
use prometeu_bytecode::ProgramImage;
|
|
use prometeu_bytecode::Value;
|
|
use crate::roots::{RootVisitor, visit_value_for_roots};
|
|
use crate::heap::Heap;
|
|
use crate::object::ObjectKind;
|
|
use prometeu_bytecode::{
|
|
TRAP_BAD_RET_SLOTS, TRAP_DIV_ZERO, TRAP_INVALID_FUNC, TRAP_INVALID_SYSCALL, TRAP_OOB,
|
|
TRAP_STACK_UNDERFLOW, TRAP_TYPE, TrapInfo,
|
|
};
|
|
use prometeu_hal::vm_fault::VmFault;
|
|
|
|
/// Reason why the Virtual Machine stopped execution during a specific run.
///
/// This allows the system to decide if it should continue execution in the
/// next tick or if the frame is finalized. Produced by `run_budget` (inside
/// `BudgetReport`) and by `step` as its `Err` variant.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LogicalFrameEndingReason {
    /// Execution reached a `FRAME_SYNC` instruction, marking the end of the logical frame.
    FrameSync,
    /// The cycle budget for the current host tick was exhausted before reaching `FRAME_SYNC`.
    BudgetExhausted,
    /// A `HALT` instruction was executed, terminating the program.
    Halted,
    /// The Program Counter (PC) reached the end of the available bytecode.
    EndOfRom,
    /// Execution hit a registered breakpoint (or an explicit `TRAP` opcode).
    Breakpoint,
    /// A runtime trap occurred (e.g., out-of-bounds access).
    Trap(TrapInfo),
    /// A fatal error occurred that cannot be recovered (e.g., stack underflow).
    Panic(String),
}
|
|
|
|
/// Intermediate error produced by per-opcode helpers (e.g. the closures
/// passed to `binary_op`), later enriched with PC/opcode context and
/// converted into a `LogicalFrameEndingReason` by the interpreter loop.
///
/// Derives added for consistency with the sibling public types
/// (`LogicalFrameEndingReason`, `BudgetReport`), which all derive
/// `Debug, Clone, PartialEq, Eq`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OpError {
    /// Recoverable runtime trap: (trap code such as `TRAP_DIV_ZERO`, message).
    Trap(u32, String),
    /// Fatal, non-recoverable interpreter error.
    Panic(String),
}
|
|
|
|
impl From<TrapInfo> for LogicalFrameEndingReason {
    /// Wraps trap metadata in the corresponding frame-ending variant,
    /// enabling `?`-style conversions from trap-producing helpers.
    fn from(info: TrapInfo) -> Self {
        Self::Trap(info)
    }
}
|
|
|
|
/// A report detailing the results of an execution slice (`run_budget`).
///
/// Returned once per slice so the host can account for consumed cycles and
/// decide how to schedule the next tick.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BudgetReport {
    /// Total virtual cycles consumed during this run (delta, not lifetime total).
    pub cycles_used: u64,
    /// Number of VM instructions executed in this slice.
    pub steps_executed: u32,
    /// The reason why this execution slice ended.
    pub reason: LogicalFrameEndingReason,
}
|
|
|
|
/// The stack-based bytecode virtual machine.
///
/// Holds the full mutable execution state: program image, operand/call
/// stacks, globals, heap, cycle accounting, and debugger/GC bookkeeping.
pub struct VirtualMachine {
    /// Program Counter (PC): The absolute byte offset in ROM for the next instruction.
    pub pc: usize,
    /// Operand Stack: The primary workspace for all mathematical and logical operations.
    /// Also hosts function locals (addressed relative to `CallFrame::stack_base`).
    pub operand_stack: Vec<Value>,
    /// Call Stack: Manages function call context (return addresses, frame limits).
    pub call_stack: Vec<CallFrame>,
    /// Global Variable Store: Variables that persist for the lifetime of the program.
    /// Grown on demand by `SET_GLOBAL`.
    pub globals: Vec<Value>,
    /// The loaded executable (Bytecode + Constant Pool), that is the ROM translated.
    pub program: ProgramImage,
    /// Heap Memory: Dynamic allocation pool (closures, objects, …).
    pub heap: Heap,
    /// Total virtual cycles consumed since the VM started.
    pub cycles: u64,
    /// Stop flag: true if a `HALT` opcode was encountered (or no program is loaded).
    pub halted: bool,
    /// Set of ROM addresses used for software breakpoints in the debugger.
    pub breakpoints: std::collections::HashSet<usize>,
    /// GC: number of newly allocated live objects threshold to trigger a collection at safepoint.
    /// The GC only runs at safepoints (e.g., FRAME_SYNC). 0 disables automatic GC.
    pub gc_alloc_threshold: usize,
    /// GC: snapshot of live objects count after the last collection (or VM init).
    last_gc_live_count: usize,
    /// Capability flags granted to the currently running program/cart.
    /// Syscalls are capability-gated using `prometeu_hal::syscalls::SyscallMeta::caps`.
    pub capabilities: prometeu_hal::syscalls::CapFlags,
}
|
|
|
|
|
|
impl Default for VirtualMachine {
|
|
fn default() -> Self {
|
|
Self::new(vec![], vec![])
|
|
}
|
|
}
|
|
|
|
impl VirtualMachine {
|
|
/// Creates a new VM instance with the provided bytecode and constants.
|
|
pub fn new(rom: Vec<u8>, constant_pool: Vec<Value>) -> Self {
|
|
Self {
|
|
pc: 0,
|
|
operand_stack: Vec::new(),
|
|
call_stack: Vec::new(),
|
|
globals: Vec::new(),
|
|
program: ProgramImage::new(
|
|
rom,
|
|
constant_pool,
|
|
vec![],
|
|
None,
|
|
std::collections::HashMap::new(),
|
|
),
|
|
heap: Heap::new(),
|
|
cycles: 0,
|
|
halted: false,
|
|
breakpoints: std::collections::HashSet::new(),
|
|
gc_alloc_threshold: 1024, // conservative default; tests may override
|
|
last_gc_live_count: 0,
|
|
capabilities: 0,
|
|
}
|
|
}
|
|
|
|
/// Resets the VM state and loads a new program.
|
|
/// This is typically called by the Firmware when starting a new App/Cartridge.
|
|
pub fn initialize(
|
|
&mut self,
|
|
program_bytes: Vec<u8>,
|
|
entrypoint: &str,
|
|
) -> Result<(), VmInitError> {
|
|
// Fail fast: reset state upfront. If we return early with an error,
|
|
// the VM is left in a "halted and empty" state.
|
|
self.program = ProgramImage::default();
|
|
self.pc = 0;
|
|
self.operand_stack.clear();
|
|
self.call_stack.clear();
|
|
self.globals.clear();
|
|
self.heap = Heap::new();
|
|
self.cycles = 0;
|
|
self.halted = true; // execution is impossible until a successful load
|
|
self.last_gc_live_count = 0;
|
|
// Preserve capabilities across loads; firmware may set them per cart.
|
|
|
|
// Only recognized format is loadable: PBS v0 industrial format
|
|
let program = if program_bytes.starts_with(b"PBS\0") {
|
|
match prometeu_bytecode::BytecodeLoader::load(&program_bytes) {
|
|
Ok(module) => {
|
|
// Run verifier on the module
|
|
let max_stacks = Verifier::verify(&module.code, &module.functions)
|
|
.map_err(VmInitError::VerificationFailed)?;
|
|
|
|
let mut program = ProgramImage::from(module);
|
|
|
|
let mut functions = program.functions.as_ref().to_vec();
|
|
for (func, max_stack) in functions.iter_mut().zip(max_stacks) {
|
|
func.max_stack_slots = max_stack;
|
|
}
|
|
program.functions = std::sync::Arc::from(functions);
|
|
|
|
program
|
|
}
|
|
Err(prometeu_bytecode::LoadError::InvalidVersion) => {
|
|
return Err(VmInitError::UnsupportedFormat);
|
|
}
|
|
Err(e) => {
|
|
return Err(VmInitError::ImageLoadFailed(e));
|
|
}
|
|
}
|
|
} else {
|
|
return Err(VmInitError::InvalidFormat);
|
|
};
|
|
|
|
// Resolve the entrypoint: empty (defaults to func 0), numeric func_idx, or symbol name.
|
|
let pc = if entrypoint.is_empty() {
|
|
program.functions.get(0).map(|f| f.code_offset as usize).unwrap_or(0)
|
|
} else if let Ok(func_idx) = entrypoint.parse::<usize>() {
|
|
program
|
|
.functions
|
|
.get(func_idx)
|
|
.map(|f| f.code_offset as usize)
|
|
.ok_or(VmInitError::EntrypointNotFound)?
|
|
} else {
|
|
// Try to resolve as a symbol name from the exports map
|
|
if let Some(&func_idx) = program.exports.get(entrypoint) {
|
|
program
|
|
.functions
|
|
.get(func_idx as usize)
|
|
.map(|f| f.code_offset as usize)
|
|
.ok_or(VmInitError::EntrypointNotFound)?
|
|
} else {
|
|
return Err(VmInitError::EntrypointNotFound);
|
|
}
|
|
};
|
|
|
|
// Finalize initialization by applying the new program and PC.
|
|
self.program = program;
|
|
self.pc = pc;
|
|
self.halted = false; // Successfully loaded, execution is now possible
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Sets the capability flags for the current program.
///
/// Syscalls are gated against these flags (see the `capabilities` field);
/// `initialize` deliberately preserves them across loads, so firmware is
/// expected to call this per loaded cart.
pub fn set_capabilities(&mut self, caps: prometeu_hal::syscalls::CapFlags) {
    self.capabilities = caps;
}
|
|
|
|
/// Prepares the VM to execute a specific entrypoint by setting the PC and
|
|
/// pushing an initial call frame.
|
|
pub fn prepare_call(&mut self, entrypoint: &str) {
|
|
let func_idx = if let Ok(idx) = entrypoint.parse::<usize>() {
|
|
idx
|
|
} else {
|
|
// Try to resolve as a symbol name
|
|
self.program.exports.get(entrypoint).map(|&idx| idx as usize).ok_or(()).unwrap_or(0) // Default to 0 if not found
|
|
};
|
|
|
|
let callee = self.program.functions.get(func_idx).cloned().unwrap_or_default();
|
|
let addr = callee.code_offset as usize;
|
|
|
|
self.pc = addr;
|
|
self.halted = false;
|
|
|
|
// Pushing a sentinel frame so RET works at the top level.
|
|
// The return address is set to the end of ROM, which will naturally
|
|
// cause the VM to stop after returning from the entrypoint.
|
|
self.operand_stack.clear();
|
|
self.call_stack.clear();
|
|
|
|
// Entrypoint also needs locals allocated.
|
|
// For the sentinel frame, stack_base is always 0.
|
|
if let Some(func) = self.program.functions.get(func_idx) {
|
|
let total_slots = func.param_slots as u32 + func.local_slots as u32;
|
|
for _ in 0..total_slots {
|
|
self.operand_stack.push(Value::Null);
|
|
}
|
|
}
|
|
|
|
self.call_stack.push(CallFrame {
|
|
return_pc: self.program.rom.len() as u32,
|
|
stack_base: 0,
|
|
func_idx,
|
|
});
|
|
}
|
|
|
|
/// Executes the VM for a limited number of cycles (budget).
///
/// This is the heart of the deterministic execution model. Instead of running
/// indefinitely, the VM runs until it consumes its allocated budget or reaches
/// a synchronization point (`FRAME_SYNC`).
///
/// # Arguments
/// * `budget` - Maximum number of cycles allowed for this execution slice.
/// * `native` - Interface for handling syscalls (Firmware/OS).
/// * `ctx` - Host context passed through to `step` for syscall/peripheral access.
///
/// # Returns
/// A `BudgetReport` with cycles used, steps executed, and the ending reason.
/// Note that traps and panics are reported via `BudgetReport::reason`, not the
/// outer `Result`: this body never returns `Err`.
pub fn run_budget(
    &mut self,
    budget: u64,
    native: &mut dyn NativeInterface,
    ctx: &mut HostContext,
) -> Result<BudgetReport, String> {
    let start_cycles = self.cycles;
    let mut steps_executed = 0;
    // Set inside the loop for explicit stops (breakpoint/trap/stuck-PC);
    // left None when the loop condition itself ends the slice.
    let mut ending_reason: Option<LogicalFrameEndingReason> = None;

    while (self.cycles - start_cycles) < budget && !self.halted && self.pc < self.program.rom.len() {
        // Debugger support: stop before executing an instruction if there's a breakpoint.
        // Note: we skip the check for the very first step of a slice to avoid
        // getting stuck on the same breakpoint repeatedly.
        if steps_executed > 0 && self.breakpoints.contains(&self.pc) {
            ending_reason = Some(LogicalFrameEndingReason::Breakpoint);
            break;
        }

        // Snapshot for the progress check below.
        let pc_before = self.pc;
        let cycles_before = self.cycles;

        // Execute a single step (Fetch-Decode-Execute)
        if let Err(reason) = self.step(native, ctx) {
            ending_reason = Some(reason);
            break;
        }
        steps_executed += 1;

        // Integrity check: ensure real progress is being made to avoid infinite loops
        // caused by zero-cycle instructions or stuck PC.
        if self.pc == pc_before && self.cycles == cycles_before && !self.halted {
            ending_reason = Some(LogicalFrameEndingReason::Panic(format!(
                "VM stuck at PC 0x{:08X}",
                self.pc
            )));
            break;
        }
    }

    // Determine why we stopped if no explicit reason (FrameSync/Breakpoint) was set.
    if ending_reason.is_none() {
        if self.halted {
            ending_reason = Some(LogicalFrameEndingReason::Halted);
        } else if self.pc >= self.program.rom.len() {
            ending_reason = Some(LogicalFrameEndingReason::EndOfRom);
        } else {
            // Budget ran out while the program is still runnable.
            ending_reason = Some(LogicalFrameEndingReason::BudgetExhausted);
        }
    }

    Ok(BudgetReport {
        cycles_used: self.cycles - start_cycles,
        steps_executed,
        // Safe: every path above guarantees ending_reason is Some by here.
        reason: ending_reason.unwrap(),
    })
}
|
|
|
|
|
|
/// Executes a single instruction at the current Program Counter (PC).
|
|
///
|
|
/// This follows the classic CPU cycle:
|
|
/// 1. Fetch: Read the opcode from memory.
|
|
/// 2. Decode: Identify what operation to perform.
|
|
/// 3. Execute: Perform the operation, updating stacks, memory, or calling peripherals.
|
|
pub fn step(
|
|
&mut self,
|
|
native: &mut dyn NativeInterface,
|
|
ctx: &mut HostContext,
|
|
) -> Result<(), LogicalFrameEndingReason> {
|
|
if self.halted || self.pc >= self.program.rom.len() {
|
|
return Ok(());
|
|
}
|
|
|
|
let start_pc = self.pc;
|
|
|
|
// Fetch & Decode
|
|
let instr = prometeu_bytecode::decode_next(self.pc, &self.program.rom)
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
|
|
let opcode = instr.opcode;
|
|
self.pc = instr.next_pc;
|
|
|
|
// Execute
|
|
match opcode {
|
|
OpCode::Nop => {}
|
|
OpCode::Halt => {
|
|
self.halted = true;
|
|
}
|
|
OpCode::Jmp => {
|
|
let target = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let func_start = self
|
|
.call_stack
|
|
.last()
|
|
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
|
|
.unwrap_or(0);
|
|
self.pc = func_start + target;
|
|
}
|
|
OpCode::JmpIfFalse => {
|
|
let target = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
match val {
|
|
Value::Boolean(false) => {
|
|
let func_start = self
|
|
.call_stack
|
|
.last()
|
|
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
|
|
.unwrap_or(0);
|
|
self.pc = func_start + target;
|
|
}
|
|
Value::Boolean(true) => {}
|
|
_ => {
|
|
return Err(self.trap(
|
|
TRAP_TYPE,
|
|
opcode as u16,
|
|
format!("Expected boolean for JMP_IF_FALSE, got {:?}", val),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
}
|
|
}
|
|
OpCode::JmpIfTrue => {
|
|
let target = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
match val {
|
|
Value::Boolean(true) => {
|
|
let func_start = self
|
|
.call_stack
|
|
.last()
|
|
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
|
|
.unwrap_or(0);
|
|
self.pc = func_start + target;
|
|
}
|
|
Value::Boolean(false) => {}
|
|
_ => {
|
|
return Err(self.trap(
|
|
TRAP_TYPE,
|
|
opcode as u16,
|
|
format!("Expected boolean for JMP_IF_TRUE, got {:?}", val),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
}
|
|
}
|
|
OpCode::Trap => {
|
|
// Manual breakpoint instruction: consume cycles and signal a breakpoint
|
|
self.cycles += OpCode::Trap.cycles();
|
|
return Err(LogicalFrameEndingReason::Breakpoint);
|
|
}
|
|
OpCode::MakeClosure => {
|
|
// Immediate carries (fn_id, capture_count)
|
|
let (fn_id, cap_count) = instr
|
|
.imm_u32x2()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
|
|
// Pop cap_count values from the operand stack, top-first.
|
|
let mut temp: Vec<Value> = Vec::with_capacity(cap_count as usize);
|
|
for _ in 0..cap_count {
|
|
let v = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
temp.push(v);
|
|
}
|
|
// Preserve order so that env[0] corresponds to captured_1 (the bottom-most
|
|
// among the popped values): reverse the temp vector.
|
|
temp.reverse();
|
|
|
|
// Allocate closure on heap and push its reference.
|
|
let href = self.heap.alloc_closure(fn_id, &temp);
|
|
self.push(Value::HeapRef(href));
|
|
}
|
|
OpCode::CallClosure => {
|
|
// Operand carries the number of user-supplied arguments (arg1..argN).
|
|
let user_arg_count = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
|
|
// Pop the closure reference from the stack (top of stack).
|
|
let clos_val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
let href = match clos_val {
|
|
Value::HeapRef(h) => h,
|
|
other => {
|
|
return Err(self.trap(
|
|
TRAP_TYPE,
|
|
opcode as u16,
|
|
format!(
|
|
"CALL_CLOSURE expects a closure handle at TOS, got {:?}",
|
|
other
|
|
),
|
|
start_pc as u32,
|
|
))
|
|
}
|
|
};
|
|
|
|
// Validate that the heap object is indeed a Closure.
|
|
let header = self.heap.header(href).ok_or_else(|| {
|
|
self.trap(
|
|
TRAP_OOB,
|
|
opcode as u16,
|
|
format!("Invalid heap handle in CALL_CLOSURE: {:?}", href),
|
|
start_pc as u32,
|
|
)
|
|
})?;
|
|
if header.kind != ObjectKind::Closure {
|
|
return Err(self.trap(
|
|
TRAP_TYPE,
|
|
opcode as u16,
|
|
format!(
|
|
"CALL_CLOSURE on non-closure object kind {:?}",
|
|
header.kind
|
|
),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
|
|
// Pop user arguments from the operand stack (top-first), then fix order.
|
|
let mut user_args: Vec<Value> = Vec::with_capacity(user_arg_count);
|
|
for _ in 0..user_arg_count {
|
|
user_args.push(self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?);
|
|
}
|
|
user_args.reverse(); // Now in logical order: arg1..argN
|
|
|
|
// Resolve target function id from the closure payload.
|
|
let fn_id = self.heap.closure_fn_id(href).ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic(
|
|
"Internal error: malformed closure object (missing fn_id)".into(),
|
|
)
|
|
})? as usize;
|
|
|
|
let callee = self.program.functions.get(fn_id).ok_or_else(|| {
|
|
self.trap(
|
|
TRAP_INVALID_FUNC,
|
|
opcode as u16,
|
|
format!("Invalid func_id {} from closure", fn_id),
|
|
start_pc as u32,
|
|
)
|
|
})?;
|
|
// Copy required fields to drop the immutable borrow before mutating self
|
|
let callee_param_slots = callee.param_slots as usize;
|
|
let callee_local_slots = callee.local_slots as usize;
|
|
let callee_code_offset = callee.code_offset as usize;
|
|
|
|
// Validate arity: param_slots must equal hidden arg0 + user_arg_count.
|
|
let expected_params = 1usize + user_arg_count;
|
|
if callee_param_slots != expected_params {
|
|
return Err(self.trap(
|
|
TRAP_TYPE,
|
|
opcode as u16,
|
|
format!(
|
|
"CALL_CLOSURE arg_count mismatch: function expects {} total params (including hidden arg0), got hidden+{}",
|
|
callee_param_slots, expected_params
|
|
),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
|
|
// Prepare the operand stack to match the direct CALL convention:
|
|
// push hidden arg0 (closure_ref) followed by arg1..argN.
|
|
self.push(Value::HeapRef(href));
|
|
for v in user_args.into_iter() { self.push(v); }
|
|
|
|
let stack_base = self
|
|
.operand_stack
|
|
.len()
|
|
.checked_sub(callee_param_slots)
|
|
.ok_or_else(|| LogicalFrameEndingReason::Panic("Stack underflow".into()))?;
|
|
|
|
// Allocate and zero-init local slots
|
|
for _ in 0..callee_local_slots { self.operand_stack.push(Value::Null); }
|
|
|
|
self.call_stack.push(CallFrame { return_pc: self.pc as u32, stack_base, func_idx: fn_id });
|
|
self.pc = callee_code_offset;
|
|
}
|
|
OpCode::PushConst => {
|
|
let idx = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let val = self.program.constant_pool.get(idx).cloned().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic("Invalid constant index".into())
|
|
})?;
|
|
self.push(val);
|
|
}
|
|
OpCode::PushI64 => {
|
|
let val = instr
|
|
.imm_i64()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
self.push(Value::Int64(val));
|
|
}
|
|
OpCode::PushI32 => {
|
|
let val = instr
|
|
.imm_i32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
self.push(Value::Int32(val));
|
|
}
|
|
OpCode::PushBounded => {
|
|
let val = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
if val > 0xFFFF {
|
|
return Err(self.trap(
|
|
TRAP_OOB,
|
|
opcode as u16,
|
|
format!("Bounded value overflow: {} > 0xFFFF", val),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
self.push(Value::Bounded(val));
|
|
}
|
|
OpCode::PushF64 => {
|
|
let val = instr
|
|
.imm_f64()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
self.push(Value::Float(val));
|
|
}
|
|
OpCode::PushBool => {
|
|
let val = instr
|
|
.imm_u8()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
self.push(Value::Boolean(val != 0));
|
|
}
|
|
OpCode::Pop => {
|
|
self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
}
|
|
OpCode::PopN => {
|
|
let n = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
for _ in 0..n {
|
|
self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
}
|
|
}
|
|
OpCode::Dup => {
|
|
let val = self.peek().map_err(|e| LogicalFrameEndingReason::Panic(e))?.clone();
|
|
self.push(val);
|
|
}
|
|
OpCode::Swap => {
|
|
let a = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
let b = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
self.push(a);
|
|
self.push(b);
|
|
}
|
|
OpCode::Add => self.binary_op(opcode, start_pc as u32, |a, b| match (&a, &b) {
|
|
(Value::String(_), _) | (_, Value::String(_)) => {
|
|
Ok(Value::String(format!("{}{}", a.to_string(), b.to_string())))
|
|
}
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_add(*b))),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_add(*b))),
|
|
(Value::Int32(a), Value::Int64(b)) => {
|
|
Ok(Value::Int64((*a as i64).wrapping_add(*b)))
|
|
}
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_add(*b as i64))),
|
|
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a + b)),
|
|
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)),
|
|
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a + *b as f64)),
|
|
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)),
|
|
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a + *b as f64)),
|
|
(Value::Bounded(a), Value::Bounded(b)) => {
|
|
let res = a.saturating_add(*b);
|
|
if res > 0xFFFF {
|
|
Err(OpError::Trap(
|
|
TRAP_OOB,
|
|
format!("Bounded addition overflow: {} + {} = {}", a, b, res),
|
|
))
|
|
} else {
|
|
Ok(Value::Bounded(res))
|
|
}
|
|
}
|
|
_ => Err(OpError::Panic("Invalid types for ADD".into())),
|
|
})?,
|
|
OpCode::Sub => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_sub(b))),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_sub(b))),
|
|
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_sub(b))),
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_sub(b as i64))),
|
|
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a - b)),
|
|
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)),
|
|
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a - b as f64)),
|
|
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)),
|
|
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a - b as f64)),
|
|
(Value::Bounded(a), Value::Bounded(b)) => {
|
|
if a < b {
|
|
Err(OpError::Trap(
|
|
TRAP_OOB,
|
|
format!("Bounded subtraction underflow: {} - {} < 0", a, b),
|
|
))
|
|
} else {
|
|
Ok(Value::Bounded(a - b))
|
|
}
|
|
}
|
|
_ => Err(OpError::Panic("Invalid types for SUB".into())),
|
|
})?,
|
|
OpCode::Mul => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_mul(b))),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_mul(b))),
|
|
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_mul(b))),
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_mul(b as i64))),
|
|
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a * b)),
|
|
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)),
|
|
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a * b as f64)),
|
|
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)),
|
|
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a * b as f64)),
|
|
(Value::Bounded(a), Value::Bounded(b)) => {
|
|
let res = a as u64 * b as u64;
|
|
if res > 0xFFFF {
|
|
Err(OpError::Trap(
|
|
TRAP_OOB,
|
|
format!("Bounded multiplication overflow: {} * {} = {}", a, b, res),
|
|
))
|
|
} else {
|
|
Ok(Value::Bounded(res as u32))
|
|
}
|
|
}
|
|
_ => Err(OpError::Panic("Invalid types for MUL".into())),
|
|
})?,
|
|
OpCode::Div => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(
|
|
TRAP_DIV_ZERO,
|
|
"Integer division by zero".into(),
|
|
));
|
|
}
|
|
Ok(Value::Int32(a / b))
|
|
}
|
|
(Value::Int64(a), Value::Int64(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(
|
|
TRAP_DIV_ZERO,
|
|
"Integer division by zero".into(),
|
|
));
|
|
}
|
|
Ok(Value::Int64(a / b))
|
|
}
|
|
(Value::Int32(a), Value::Int64(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(
|
|
TRAP_DIV_ZERO,
|
|
"Integer division by zero".into(),
|
|
));
|
|
}
|
|
Ok(Value::Int64(a as i64 / b))
|
|
}
|
|
(Value::Int64(a), Value::Int32(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(
|
|
TRAP_DIV_ZERO,
|
|
"Integer division by zero".into(),
|
|
));
|
|
}
|
|
Ok(Value::Int64(a / b as i64))
|
|
}
|
|
(Value::Float(a), Value::Float(b)) => {
|
|
if b == 0.0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
|
|
}
|
|
Ok(Value::Float(a / b))
|
|
}
|
|
(Value::Int32(a), Value::Float(b)) => {
|
|
if b == 0.0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
|
|
}
|
|
Ok(Value::Float(a as f64 / b))
|
|
}
|
|
(Value::Float(a), Value::Int32(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
|
|
}
|
|
Ok(Value::Float(a / b as f64))
|
|
}
|
|
(Value::Int64(a), Value::Float(b)) => {
|
|
if b == 0.0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
|
|
}
|
|
Ok(Value::Float(a as f64 / b))
|
|
}
|
|
(Value::Float(a), Value::Int64(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
|
|
}
|
|
Ok(Value::Float(a / b as f64))
|
|
}
|
|
(Value::Bounded(a), Value::Bounded(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(
|
|
TRAP_DIV_ZERO,
|
|
"Bounded division by zero".into(),
|
|
));
|
|
}
|
|
Ok(Value::Bounded(a / b))
|
|
}
|
|
_ => Err(OpError::Panic("Invalid types for DIV".into())),
|
|
})?,
|
|
OpCode::Mod => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into()));
|
|
}
|
|
Ok(Value::Int32(a % b))
|
|
}
|
|
(Value::Int64(a), Value::Int64(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into()));
|
|
}
|
|
Ok(Value::Int64(a % b))
|
|
}
|
|
(Value::Bounded(a), Value::Bounded(b)) => {
|
|
if b == 0 {
|
|
return Err(OpError::Trap(TRAP_DIV_ZERO, "Bounded modulo by zero".into()));
|
|
}
|
|
Ok(Value::Bounded(a % b))
|
|
}
|
|
_ => Err(OpError::Panic("Invalid types for MOD".into())),
|
|
})?,
|
|
OpCode::BoundToInt => {
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
if let Value::Bounded(b) = val {
|
|
self.push(Value::Int64(b as i64));
|
|
} else {
|
|
return Err(LogicalFrameEndingReason::Panic(
|
|
"Expected bounded for BOUND_TO_INT".into(),
|
|
));
|
|
}
|
|
}
|
|
OpCode::IntToBoundChecked => {
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
let int_val = val.as_integer().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic(
|
|
"Expected integer for INT_TO_BOUND_CHECKED".into(),
|
|
)
|
|
})?;
|
|
if int_val < 0 || int_val > 0xFFFF {
|
|
return Err(self.trap(
|
|
TRAP_OOB,
|
|
OpCode::IntToBoundChecked as u16,
|
|
format!("Integer to bounded conversion out of range: {}", int_val),
|
|
start_pc as u32,
|
|
));
|
|
}
|
|
self.push(Value::Bounded(int_val as u32));
|
|
}
|
|
OpCode::Eq => {
|
|
self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a == b)))?
|
|
}
|
|
OpCode::Neq => {
|
|
self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a != b)))?
|
|
}
|
|
OpCode::Lt => self.binary_op(opcode, start_pc as u32, |a, b| {
|
|
a.partial_cmp(&b)
|
|
.map(|o| Value::Boolean(o == std::cmp::Ordering::Less))
|
|
.ok_or_else(|| OpError::Panic("Invalid types for LT".into()))
|
|
})?,
|
|
OpCode::Gt => self.binary_op(opcode, start_pc as u32, |a, b| {
|
|
a.partial_cmp(&b)
|
|
.map(|o| Value::Boolean(o == std::cmp::Ordering::Greater))
|
|
.ok_or_else(|| OpError::Panic("Invalid types for GT".into()))
|
|
})?,
|
|
OpCode::Lte => self.binary_op(opcode, start_pc as u32, |a, b| {
|
|
a.partial_cmp(&b)
|
|
.map(|o| Value::Boolean(o != std::cmp::Ordering::Greater))
|
|
.ok_or_else(|| OpError::Panic("Invalid types for LTE".into()))
|
|
})?,
|
|
OpCode::Gte => self.binary_op(opcode, start_pc as u32, |a, b| {
|
|
a.partial_cmp(&b)
|
|
.map(|o| Value::Boolean(o != std::cmp::Ordering::Less))
|
|
.ok_or_else(|| OpError::Panic("Invalid types for GTE".into()))
|
|
})?,
|
|
OpCode::And => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a && b)),
|
|
_ => Err(OpError::Panic("Invalid types for AND".into())),
|
|
})?,
|
|
OpCode::Or => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a || b)),
|
|
_ => Err(OpError::Panic("Invalid types for OR".into())),
|
|
})?,
|
|
OpCode::Not => {
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
if let Value::Boolean(b) = val {
|
|
self.push(Value::Boolean(!b));
|
|
} else {
|
|
return Err(LogicalFrameEndingReason::Panic("Invalid type for NOT".into()));
|
|
}
|
|
}
|
|
OpCode::BitAnd => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a & b)),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a & b)),
|
|
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) & b)),
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a & (b as i64))),
|
|
_ => Err(OpError::Panic("Invalid types for BitAnd".into())),
|
|
})?,
|
|
OpCode::BitOr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a | b)),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a | b)),
|
|
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) | b)),
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a | (b as i64))),
|
|
_ => Err(OpError::Panic("Invalid types for BitOr".into())),
|
|
})?,
|
|
OpCode::BitXor => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a ^ b)),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a ^ b)),
|
|
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) ^ b)),
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a ^ (b as i64))),
|
|
_ => Err(OpError::Panic("Invalid types for BitXor".into())),
|
|
})?,
|
|
OpCode::Shl => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shl(b as u32))),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))),
|
|
(Value::Int32(a), Value::Int64(b)) => {
|
|
Ok(Value::Int64((a as i64).wrapping_shl(b as u32)))
|
|
}
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))),
|
|
_ => Err(OpError::Panic("Invalid types for Shl".into())),
|
|
})?,
|
|
OpCode::Shr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
|
|
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shr(b as u32))),
|
|
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))),
|
|
(Value::Int32(a), Value::Int64(b)) => {
|
|
Ok(Value::Int64((a as i64).wrapping_shr(b as u32)))
|
|
}
|
|
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))),
|
|
_ => Err(OpError::Panic("Invalid types for Shr".into())),
|
|
})?,
|
|
OpCode::Neg => {
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
match val {
|
|
Value::Int32(a) => self.push(Value::Int32(a.wrapping_neg())),
|
|
Value::Int64(a) => self.push(Value::Int64(a.wrapping_neg())),
|
|
Value::Float(a) => self.push(Value::Float(-a)),
|
|
_ => {
|
|
return Err(LogicalFrameEndingReason::Panic("Invalid type for Neg".into()));
|
|
}
|
|
}
|
|
}
|
|
OpCode::GetGlobal => {
|
|
let idx = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let val = self.globals.get(idx).cloned().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic("Invalid global index".into())
|
|
})?;
|
|
self.push(val);
|
|
}
|
|
OpCode::SetGlobal => {
|
|
let idx = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
if idx >= self.globals.len() {
|
|
self.globals.resize(idx + 1, Value::Null);
|
|
}
|
|
self.globals[idx] = val;
|
|
}
|
|
OpCode::GetLocal => {
|
|
let slot = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
let frame = self.call_stack.last().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic("No active call frame".into())
|
|
})?;
|
|
let func = &self.program.functions[frame.func_idx];
|
|
|
|
crate::local_addressing::check_local_slot(
|
|
func,
|
|
slot,
|
|
opcode as u16,
|
|
start_pc as u32,
|
|
)
|
|
.map_err(|trap_info| {
|
|
self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc)
|
|
})?;
|
|
|
|
let stack_idx = crate::local_addressing::local_index(frame, slot);
|
|
let val = self.operand_stack.get(stack_idx).cloned().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic(
|
|
"Internal error: validated local slot not found in stack".into(),
|
|
)
|
|
})?;
|
|
self.push(val);
|
|
}
|
|
OpCode::SetLocal => {
|
|
let slot = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
let frame = self.call_stack.last().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic("No active call frame".into())
|
|
})?;
|
|
let func = &self.program.functions[frame.func_idx];
|
|
|
|
crate::local_addressing::check_local_slot(
|
|
func,
|
|
slot,
|
|
opcode as u16,
|
|
start_pc as u32,
|
|
)
|
|
.map_err(|trap_info| {
|
|
self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc)
|
|
})?;
|
|
|
|
let stack_idx = crate::local_addressing::local_index(frame, slot);
|
|
self.operand_stack[stack_idx] = val;
|
|
}
|
|
OpCode::Call => {
|
|
let func_id = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
|
|
as usize;
|
|
let callee = self.program.functions.get(func_id).ok_or_else(|| {
|
|
self.trap(
|
|
TRAP_INVALID_FUNC,
|
|
opcode as u16,
|
|
format!("Invalid func_id {}", func_id),
|
|
start_pc as u32,
|
|
)
|
|
})?;
|
|
|
|
if self.operand_stack.len() < callee.param_slots as usize {
|
|
return Err(LogicalFrameEndingReason::Panic(format!(
|
|
"Stack underflow during CALL to func {}: expected at least {} arguments, got {}",
|
|
func_id,
|
|
callee.param_slots,
|
|
self.operand_stack.len()
|
|
)));
|
|
}
|
|
|
|
let stack_base = self.operand_stack.len() - callee.param_slots as usize;
|
|
|
|
// Allocate and zero-init local_slots
|
|
for _ in 0..callee.local_slots {
|
|
self.operand_stack.push(Value::Null);
|
|
}
|
|
|
|
self.call_stack.push(CallFrame {
|
|
return_pc: self.pc as u32,
|
|
stack_base,
|
|
func_idx: func_id,
|
|
});
|
|
self.pc = callee.code_offset as usize;
|
|
}
|
|
OpCode::Ret => {
|
|
let frame = self.call_stack.pop().ok_or_else(|| {
|
|
LogicalFrameEndingReason::Panic("Call stack underflow".into())
|
|
})?;
|
|
let func = &self.program.functions[frame.func_idx];
|
|
let return_slots = func.return_slots as usize;
|
|
|
|
let current_height = self.operand_stack.len();
|
|
let expected_height = frame.stack_base
|
|
+ func.param_slots as usize
|
|
+ func.local_slots as usize
|
|
+ return_slots;
|
|
|
|
if current_height != expected_height {
|
|
return Err(self.trap(TRAP_BAD_RET_SLOTS, opcode as u16, format!(
|
|
"Incorrect stack height at RET in func {}: expected {} slots (stack_base={} + params={} + locals={} + returns={}), got {}",
|
|
frame.func_idx, expected_height, frame.stack_base, func.param_slots, func.local_slots, return_slots, current_height
|
|
), start_pc as u32));
|
|
}
|
|
|
|
// Copy return values (preserving order: pop return_slots values, then reverse to push back)
|
|
let mut return_vals = Vec::with_capacity(return_slots);
|
|
for _ in 0..return_slots {
|
|
return_vals.push(self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?);
|
|
}
|
|
return_vals.reverse();
|
|
|
|
self.operand_stack.truncate(frame.stack_base);
|
|
for val in return_vals {
|
|
self.push(val);
|
|
}
|
|
self.pc = frame.return_pc as usize;
|
|
}
|
|
OpCode::Syscall => {
|
|
let pc_at_syscall = start_pc as u32;
|
|
let id = instr
|
|
.imm_u32()
|
|
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
|
|
let syscall = prometeu_hal::syscalls::Syscall::from_u32(id).ok_or_else(|| {
|
|
self.trap(
|
|
TRAP_INVALID_SYSCALL,
|
|
OpCode::Syscall as u16,
|
|
format!("Unknown syscall: 0x{:08X}", id),
|
|
pc_at_syscall,
|
|
)
|
|
})?;
|
|
|
|
// Capability check before any side effects or argument consumption.
|
|
let meta = prometeu_hal::syscalls::meta_for(syscall);
|
|
if (self.capabilities & meta.caps) != meta.caps {
|
|
return Err(self.trap(
|
|
TRAP_INVALID_SYSCALL,
|
|
OpCode::Syscall as u16,
|
|
format!(
|
|
"Missing capability for syscall {} (required=0x{:X})",
|
|
syscall.name(), meta.caps
|
|
),
|
|
pc_at_syscall,
|
|
));
|
|
}
|
|
|
|
let args_count = syscall.args_count();
|
|
|
|
let mut args = Vec::with_capacity(args_count);
|
|
for _ in 0..args_count {
|
|
let v = self.pop().map_err(|_e| {
|
|
self.trap(
|
|
TRAP_STACK_UNDERFLOW,
|
|
OpCode::Syscall as u16,
|
|
"Syscall argument stack underflow".to_string(),
|
|
pc_at_syscall,
|
|
)
|
|
})?;
|
|
args.push(v);
|
|
}
|
|
args.reverse();
|
|
|
|
let stack_height_before = self.operand_stack.len();
|
|
let mut ret = crate::HostReturn::new(&mut self.operand_stack);
|
|
native.syscall(id, &args, &mut ret, ctx).map_err(|fault| match fault {
|
|
VmFault::Trap(code, msg) => {
|
|
self.trap(code, OpCode::Syscall as u16, msg, pc_at_syscall)
|
|
}
|
|
VmFault::Panic(msg) => LogicalFrameEndingReason::Panic(msg),
|
|
VmFault::Unavailable => {
|
|
LogicalFrameEndingReason::Panic("Host feature unavailable".into())
|
|
}
|
|
})?;
|
|
|
|
let stack_height_after = self.operand_stack.len();
|
|
let results_pushed = stack_height_after - stack_height_before;
|
|
if results_pushed != syscall.results_count() {
|
|
return Err(LogicalFrameEndingReason::Panic(format!(
|
|
"Syscall {} (0x{:08X}) results mismatch: expected {}, got {}",
|
|
syscall.name(),
|
|
id,
|
|
syscall.results_count(),
|
|
results_pushed
|
|
)));
|
|
}
|
|
}
|
|
OpCode::FrameSync => {
|
|
// Marks the logical end of a frame: consume cycles and signal to the driver
|
|
self.cycles += OpCode::FrameSync.cycles();
|
|
|
|
// GC Safepoint: only at FRAME_SYNC
|
|
if self.gc_alloc_threshold > 0 {
|
|
let live_now = self.heap.len();
|
|
let since_last = live_now.saturating_sub(self.last_gc_live_count);
|
|
if since_last >= self.gc_alloc_threshold {
|
|
// Collect GC roots from VM state
|
|
struct CollectRoots(Vec<prometeu_bytecode::HeapRef>);
|
|
impl crate::roots::RootVisitor for CollectRoots {
|
|
fn visit_heap_ref(&mut self, r: prometeu_bytecode::HeapRef) {
|
|
self.0.push(r);
|
|
}
|
|
}
|
|
let mut collector = CollectRoots(Vec::new());
|
|
self.visit_roots(&mut collector);
|
|
|
|
// Run mark-sweep
|
|
self.heap.mark_from_roots(collector.0);
|
|
self.heap.sweep();
|
|
// Update baseline for next cycles
|
|
self.last_gc_live_count = self.heap.len();
|
|
}
|
|
}
|
|
|
|
return Err(LogicalFrameEndingReason::FrameSync);
|
|
}
|
|
}
|
|
|
|
// Apply the instruction cost to the cycle counter
|
|
self.cycles += opcode.cycles();
|
|
Ok(())
|
|
}
|
|
|
|
/// Builds a `Trap` frame-ending reason from a raw trap description.
///
/// Delegates to `ProgramImage::create_trap`, which enriches the trap with
/// program-level context (presumably source mapping — confirm in `create_trap`).
///
/// * `code`    - trap code constant (e.g. `TRAP_OOB`, `TRAP_DIV_ZERO`).
/// * `opcode`  - numeric opcode that caused the trap.
/// * `message` - human-readable description.
/// * `pc`      - program counter at the start of the faulting instruction.
pub fn trap(
    &self,
    code: u32,
    opcode: u16,
    message: String,
    pc: u32,
) -> LogicalFrameEndingReason {
    LogicalFrameEndingReason::Trap(self.program.create_trap(code, opcode, message, pc))
}
|
|
|
|
/// Pushes a value onto the operand stack. Never fails; the stack grows on demand.
pub fn push(&mut self, val: Value) {
    self.operand_stack.push(val);
}
|
|
|
|
/// Pops the top value from the operand stack.
///
/// # Errors
/// Returns `Err("Stack underflow")` when the stack is empty.
pub fn pop(&mut self) -> Result<Value, String> {
    // `ok_or_else` defers the error-String allocation to the failure path;
    // the previous `ok_or("Stack underflow".into())` allocated a String on
    // every call, including successful pops on this hot path.
    self.operand_stack
        .pop()
        .ok_or_else(|| "Stack underflow".to_string())
}
|
|
|
|
pub fn pop_number(&mut self) -> Result<f64, String> {
|
|
let val = self.pop()?;
|
|
val.as_float().ok_or_else(|| "Expected number".into())
|
|
}
|
|
|
|
pub fn pop_integer(&mut self) -> Result<i64, String> {
|
|
let val = self.pop()?;
|
|
if let Value::Boolean(b) = val {
|
|
return Ok(if b { 1 } else { 0 });
|
|
}
|
|
val.as_integer().ok_or_else(|| "Expected integer".into())
|
|
}
|
|
|
|
/// Returns a reference to the top of the operand stack without popping it.
///
/// # Errors
/// Returns `Err("Stack underflow")` when the stack is empty.
pub fn peek(&self) -> Result<&Value, String> {
    // `ok_or_else` avoids allocating the error String on the success path
    // (the previous `ok_or("...".into())` evaluated its argument eagerly).
    self.operand_stack
        .last()
        .ok_or_else(|| "Stack underflow".to_string())
}
|
|
|
|
fn binary_op<F>(
|
|
&mut self,
|
|
opcode: OpCode,
|
|
start_pc: u32,
|
|
f: F,
|
|
) -> Result<(), LogicalFrameEndingReason>
|
|
where
|
|
F: FnOnce(Value, Value) -> Result<Value, OpError>,
|
|
{
|
|
let b = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
let a = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?;
|
|
match f(a, b) {
|
|
Ok(res) => {
|
|
self.push(res);
|
|
Ok(())
|
|
}
|
|
Err(OpError::Trap(code, msg)) => Err(self.trap(code, opcode as u16, msg, start_pc)),
|
|
Err(OpError::Panic(msg)) => Err(LogicalFrameEndingReason::Panic(msg)),
|
|
}
|
|
}
|
|
|
|
/// Visit all GC roots reachable from the VM state.
|
|
/// This includes:
|
|
/// - Entire operand stack values
|
|
/// - Locals/args in each call frame (derived from `stack_base` and function layout)
|
|
/// - Global variables
|
|
pub fn visit_roots<V: RootVisitor + ?Sized>(&self, visitor: &mut V) {
|
|
// 1) Operand stack (all values are roots)
|
|
for v in &self.operand_stack {
|
|
visit_value_for_roots(v, visitor);
|
|
}
|
|
|
|
// 2) Call frames: iterate locals/args range for each frame
|
|
for frame in &self.call_stack {
|
|
if let Some(func_meta) = self.program.functions.get(frame.func_idx) {
|
|
let start = frame.stack_base;
|
|
let frame_slots = (func_meta.param_slots as usize) + (func_meta.local_slots as usize);
|
|
let mut end = start.saturating_add(frame_slots);
|
|
// Clamp to current stack height just in case
|
|
if end > self.operand_stack.len() { end = self.operand_stack.len(); }
|
|
for i in start..end {
|
|
if let Some(v) = self.operand_stack.get(i) {
|
|
visit_value_for_roots(v, visitor);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// 3) Globals
|
|
for g in &self.globals {
|
|
visit_value_for_roots(g, visitor);
|
|
}
|
|
}
|
|
}
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
|
|
fn new_test_vm(rom: Vec<u8>, constant_pool: Vec<Value>) -> VirtualMachine {
|
|
let rom_len = rom.len() as u32;
|
|
let mut vm = VirtualMachine::new(rom, constant_pool);
|
|
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
|
|
code_offset: 0,
|
|
code_len: rom_len,
|
|
..Default::default()
|
|
}]);
|
|
vm
|
|
}
|
|
use crate::HostReturn;
|
|
use prometeu_bytecode::{FunctionMeta, TRAP_INVALID_LOCAL, TRAP_STACK_UNDERFLOW};
|
|
use prometeu_hal::expect_int;
|
|
|
|
/// No-op native interface for tests: accepts every syscall, ignores all
/// arguments, and pushes no results.
struct MockNative;
impl NativeInterface for MockNative {
    fn syscall(
        &mut self,
        _id: u32,
        _args: &[Value],
        _ret: &mut HostReturn,
        _ctx: &mut HostContext,
    ) -> Result<(), VmFault> {
        Ok(())
    }
}
|
|
|
|
#[test]
fn test_arithmetic_chain() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Chained arithmetic on i32 immediates:
    // (10 + 20) * 2 / 5 % 4  ->  30 * 2 / 5 % 4  ->  60 / 5 % 4  ->  12 % 4  ->  0
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&2i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Mul as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&5i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&4i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Mod as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.run_budget(100, &mut native, &mut ctx).unwrap();

    assert_eq!(vm.pop().unwrap(), Value::Int32(0));
}
|
|
|
|
#[test]
fn test_div_by_zero_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // 10 / 0 must raise TRAP_DIV_ZERO attributed to the Div opcode.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_DIV_ZERO);
            assert_eq!(trap.opcode, OpCode::Div as u16);
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_int_to_bound_checked_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // A value above the bounded range (65535) must raise TRAP_OOB
    // attributed to IntToBoundChecked.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&70000i32.to_le_bytes()); // > 65535
    rom.extend_from_slice(&(OpCode::IntToBoundChecked as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_OOB);
            assert_eq!(trap.opcode, OpCode::IntToBoundChecked as u16);
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_bounded_add_overflow_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // 60000 + 10000 overflows the bounded range, so Add must raise TRAP_OOB.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushBounded as u16).to_le_bytes());
    rom.extend_from_slice(&60000u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushBounded as u16).to_le_bytes());
    rom.extend_from_slice(&10000u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_OOB);
            assert_eq!(trap.opcode, OpCode::Add as u16);
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_comparisons_polymorphic() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Mixed-type comparison: i32 vs f64. 10 < 20.5 (true)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes());
    rom.extend_from_slice(&20.5f64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Lt as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Boolean(true));
}
|
|
|
|
#[test]
fn test_push_i64_immediate() {
    // PushI64 must decode its 8-byte little-endian immediate.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom.extend_from_slice(&42i64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Int64(42));
}
|
|
|
|
#[test]
fn test_push_f64_immediate() {
    // PushF64 must decode its 8-byte little-endian IEEE-754 immediate.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes());
    rom.extend_from_slice(&3.14f64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Float(3.14));
}
|
|
|
|
#[test]
fn test_push_bool_immediate() {
    // PushBool takes a single-byte immediate: 1 => true, 0 => false.
    let mut rom = Vec::new();
    // True
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    // False
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(0);
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap(); // Push true
    assert_eq!(vm.peek().unwrap(), &Value::Boolean(true));
    vm.step(&mut native, &mut ctx).unwrap(); // Push false
    assert_eq!(vm.peek().unwrap(), &Value::Boolean(false));
}
|
|
|
|
#[test]
fn test_push_const_string() {
    // PushConst loads constant-pool entry 0 (a string) onto the stack.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushConst as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let cp = vec![Value::String("hello".into())];
    let mut vm = VirtualMachine::new(rom, cp);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::String("hello".into()));
}
|
|
|
|
#[test]
fn test_call_ret_scope_separation() {
    // Verifies CALL/RET frame isolation: the argument occupies local slot 0
    // inside the callee, and RET leaves exactly the declared return slots.
    let mut rom = Vec::new();

    // entrypoint:
    //   PUSH_I64 10
    //   CALL func_id 1
    //   HALT
    rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom.extend_from_slice(&10i64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let func_addr = rom.len();

    // func:
    //   PUSH_I64 20
    //   GET_LOCAL 0  -- should be 10 (arg)
    //   ADD          -- 10 + 20 = 30
    //   SET_LOCAL 0  -- store result in local 0 (the arg slot)
    //   GET_LOCAL 0  -- read 30 back
    //   RET
    rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom.extend_from_slice(&20i64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    // Function 0 = entrypoint; function 1 = callee with 1 param / 1 return.
    let functions = vec![
        FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() },
        FunctionMeta {
            code_offset: func_addr as u32,
            code_len: (rom.len() - func_addr) as u32,
            param_slots: 1,
            return_slots: 1,
            ..Default::default()
        },
    ];

    let mut vm = VirtualMachine {
        program: ProgramImage::new(
            rom,
            vec![],
            functions,
            None,
            std::collections::HashMap::new(),
        ),
        ..Default::default()
    };
    vm.prepare_call("0");
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Run until Halt (bounded to avoid an infinite loop on regression)
    let mut steps = 0;
    while !vm.halted && steps < 100 {
        vm.step(&mut native, &mut ctx).unwrap();
        steps += 1;
    }

    assert!(vm.halted);
    assert_eq!(vm.pop_integer().unwrap(), 30);
    assert_eq!(vm.operand_stack.len(), 0);
    assert_eq!(vm.call_stack.len(), 1);
    // Scope frames removed: no scope stack to assert on
}
|
|
|
|
#[test]
fn test_ret_mandatory_value() {
    // A function declaring return_slots=1 must push a value before RET;
    // returning without one is a TRAP_BAD_RET_SLOTS.
    let mut rom = Vec::new();
    // entrypoint: CALL func_id 1; HALT
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let func_addr = rom.len();
    // func: RET (no value pushed beforehand)
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    let functions = vec![
        FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() },
        FunctionMeta {
            code_offset: func_addr as u32,
            code_len: (rom.len() - func_addr) as u32,
            param_slots: 0,
            return_slots: 1,
            ..Default::default()
        },
    ];

    let mut vm = VirtualMachine {
        program: ProgramImage::new(
            rom,
            vec![],
            functions,
            None,
            std::collections::HashMap::new(),
        ),
        ..Default::default()
    };
    vm.prepare_call("0");
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap(); // CALL
    let res = vm.step(&mut native, &mut ctx); // RET -> should fail
    assert!(res.is_err());
    match res.unwrap_err() {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_BAD_RET_SLOTS);
        }
        _ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS)"),
    }

    // Now the happy path: push a return value before RET.
    let mut rom2 = Vec::new();
    rom2.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom2.extend_from_slice(&1u32.to_le_bytes());
    rom2.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let func_addr2 = rom2.len();
    rom2.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom2.extend_from_slice(&123i64.to_le_bytes());
    rom2.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    let functions2 = vec![
        FunctionMeta { code_offset: 0, code_len: func_addr2 as u32, ..Default::default() },
        FunctionMeta {
            code_offset: func_addr2 as u32,
            code_len: (rom2.len() - func_addr2) as u32,
            param_slots: 0,
            return_slots: 1,
            ..Default::default()
        },
    ];

    let mut vm2 = VirtualMachine {
        program: ProgramImage::new(
            rom2,
            vec![],
            functions2,
            None,
            std::collections::HashMap::new(),
        ),
        ..Default::default()
    };
    vm2.prepare_call("0");
    vm2.step(&mut native, &mut ctx).unwrap(); // CALL
    vm2.step(&mut native, &mut ctx).unwrap(); // PUSH_I64
    vm2.step(&mut native, &mut ctx).unwrap(); // RET

    assert_eq!(vm2.operand_stack.len(), 1);
    assert_eq!(vm2.pop().unwrap(), Value::Int64(123));
}
|
|
|
|
// Scope tests removed under PR-2.1 (scope frames eliminated)
|
|
|
|
#[test]
fn test_push_i32() {
    // PushI32 must decode its 4-byte little-endian immediate.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Int32(42));
}
|
|
|
|
#[test]
fn test_bitwise_promotion() {
    // Bitwise ops keep i32 when both operands are i32, and promote to i64
    // when either operand is i64.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // i32 & i32 -> i32
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0xF0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0x0Fi32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::BitAnd as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Int32(0));

    // i32 | i64 -> i64
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0xF0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom.extend_from_slice(&0x0Fi64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::BitOr as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Int64(0xFF));
}
|
|
|
|
#[test]
fn test_comparisons_lte_gte() {
    // Boundary checks for the non-strict comparison opcodes.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // 10 <= 20 (true)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Lte as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Boolean(true));

    // 20 >= 20 (true) — equality edge for Gte
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Gte as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Boolean(true));
}
|
|
|
|
#[test]
fn test_negation() {
    // Neg on an i32 yields the arithmetic negation, preserving the type.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Neg as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap();
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Int32(-42));
}
|
|
|
|
#[test]
fn test_jmp_if_true() {
    // JmpIfTrue with a truthy condition must jump over the first Halt to
    // the absolute byte offset in its immediate.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Byte-offset layout of the ROM below:
    // 0-1:   PushBool
    // 2:     1 (u8)
    // 3-4:   JmpIfTrue
    // 5-8:   addr (u32)
    // 9-10:  Halt (Offset 9)
    // 11-12: PushI32 (Offset 11)
    // 13-16: 100 (i32)
    // 17-18: Halt

    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
    rom.extend_from_slice(&(11u32).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); // Offset 9
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); // Offset 11
    rom.extend_from_slice(&100i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.step(&mut native, &mut ctx).unwrap(); // PushBool
    vm.step(&mut native, &mut ctx).unwrap(); // JmpIfTrue
    assert_eq!(vm.pc, 11);
    vm.step(&mut native, &mut ctx).unwrap(); // PushI32
    assert_eq!(vm.pop().unwrap(), Value::Int32(100));
}
|
|
|
|
#[test]
fn test_trap_opcode() {
    // The Trap opcode acts as a breakpoint: execution stops with
    // `Breakpoint`, the pc advances past Trap, and the stack is preserved.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Trap as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    assert_eq!(report.reason, LogicalFrameEndingReason::Breakpoint);
    assert_eq!(vm.pc, 8); // PushI32 (6 bytes) + Trap (2 bytes)
    assert_eq!(vm.peek().unwrap(), &Value::Int32(42));
}
|
|
|
|
#[test]
fn test_pop_n_opcode() {
    // PopN 2 discards the two topmost values (3 and 2), leaving only 1.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&2i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&3i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PopN as u16).to_le_bytes());
    rom.extend_from_slice(&2u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.run_budget(100, &mut native, &mut ctx).unwrap();

    assert_eq!(vm.pop().unwrap(), Value::Int32(1));
    assert!(vm.pop().is_err()); // Stack should be empty
}
|
|
|
|
|
|
#[test]
fn test_entry_point_ret_with_prepare_call() {
    // A RET from the entry frame (prepared via `prepare_call`) should fall
    // through to EndOfRom rather than trap or panic.
    // PushI32 0 (0x17), then Ret (0x51)
    let rom = vec![
        0x17, 0x00, // PushI32
        0x00, 0x00, 0x00, 0x00, // value 0
        0x11, 0x00, // Pop
        0x51, 0x00, // Ret
    ];
    let mut vm = VirtualMachine::new(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);
    let mut ctx = HostContext::new(None);
    // Local no-op native (accepts any syscall; none are issued here).
    struct TestNative;
    impl NativeInterface for TestNative {
        fn syscall(
            &mut self,
            _id: u32,
            _args: &[Value],
            _ret: &mut HostReturn,
            _ctx: &mut HostContext,
        ) -> Result<(), VmFault> {
            Ok(())
        }
    }
    let mut native = TestNative;

    vm.prepare_call("0");
    let result = vm.run_budget(100, &mut native, &mut ctx).expect("VM run failed");
    assert_eq!(result.reason, LogicalFrameEndingReason::EndOfRom);
}
|
|
|
|
#[test]
fn test_syscall_abi_multi_slot_return() {
    // A native handler that pushes more results than the syscall's declared
    // result count must be rejected by the VM's ABI check.
    let rom = vec![
        0x70, 0x00, // Syscall + Reserved
        0x01, 0x00, 0x00, 0x00, // Syscall ID 1
    ];

    // Pushes three results regardless of what syscall 1 declares.
    struct MultiReturnNative;
    impl NativeInterface for MultiReturnNative {
        fn syscall(
            &mut self,
            _id: u32,
            _args: &[Value],
            ret: &mut HostReturn,
            _ctx: &mut HostContext,
        ) -> Result<(), VmFault> {
            ret.push_bool(true);
            ret.push_int(42);
            ret.push_bounded(255)?;
            Ok(())
        }
    }

    let mut vm = VirtualMachine::new(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);
    let mut native = MultiReturnNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    // Ensure we have SYSTEM capability so we pass capability gate
    vm.set_capabilities(prometeu_hal::syscalls::caps::SYSTEM);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Under PR5, VM enforces return-slot count based on SyscallMeta during syscall
    // execution. A mismatch yields a Panic with a descriptive message.
    match report.reason {
        LogicalFrameEndingReason::Panic(msg) => {
            assert!(msg.contains("results mismatch"));
        }
        _ => panic!("Expected Panic with results mismatch, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_syscall_abi_void_return() {
    // Program: a single Syscall with id 1 whose handler pushes nothing.
    let rom = vec![
        0x70, 0x00, // Syscall + Reserved
        0x01, 0x00, 0x00, 0x00, // Syscall ID 1
    ];

    // Native handler for a void syscall: no return slots pushed.
    struct VoidReturnNative;
    impl NativeInterface for VoidReturnNative {
        fn syscall(
            &mut self,
            _id: u32,
            _args: &[Value],
            _ret: &mut HostReturn,
            _ctx: &mut HostContext,
        ) -> Result<(), VmFault> {
            Ok(())
        }
    }

    let mut vm = VirtualMachine::new(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);
    let mut native = VoidReturnNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    // Seed the operand stack; a void syscall must leave it untouched.
    vm.operand_stack.push(Value::Int32(100));
    vm.run_budget(100, &mut native, &mut ctx).unwrap();

    assert_eq!(vm.pop().unwrap(), Value::Int32(100));
    assert!(vm.operand_stack.is_empty());
}
|
|
|
|
#[test]
fn test_syscall_arg_type_mismatch_trap() {
    // GfxClear (0x1001) takes 1 argument. We deliberately push a Bool where
    // the handler demands an Int so expect_int raises a type fault.
    let rom = vec![
        0x16, 0x00, // PushBool + Reserved
        0x01, // value 1 (true)
        0x70, 0x00, // Syscall + Reserved
        0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001
    ];

    // Native handler that type-checks its first argument as an integer.
    struct ArgCheckNative;
    impl NativeInterface for ArgCheckNative {
        fn syscall(
            &mut self,
            _id: u32,
            args: &[Value],
            _ret: &mut HostReturn,
            _ctx: &mut HostContext,
        ) -> Result<(), VmFault> {
            expect_int(args, 0)?;
            Ok(())
        }
    }

    let mut vm = VirtualMachine::new(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);
    let mut native = ArgCheckNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    // Ensure we have GFX capability so we reach type checking inside native handler
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_TYPE);
            assert_eq!(trap.opcode, OpCode::Syscall as u16);
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_invalid_syscall_trap() {
    // A syscall id that matches no table entry must raise TRAP_INVALID_SYSCALL.
    let rom = vec![
        0x70, 0x00, // Syscall + Reserved
        0xEF, 0xBE, 0xAD, 0xDE, // 0xDEADBEEF
    ];
    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    // Grant GFX capability so arg underflow is checked (capability gate is first)
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Guard-clause style: extract the trap or fail immediately.
    let LogicalFrameEndingReason::Trap(trap) = &report.reason else {
        panic!("Expected Trap, got {:?}", report.reason);
    };
    assert_eq!(trap.code, TRAP_INVALID_SYSCALL);
    assert_eq!(trap.opcode, OpCode::Syscall as u16);
    assert!(trap.message.contains("Unknown syscall"));
    assert_eq!(trap.pc, 0);
}
|
|
|
|
#[test]
fn test_syscall_arg_underflow_trap() {
    // GfxClear (0x1001) expects 1 arg; invoking it on an empty operand stack
    // must raise a stack-underflow trap.
    let rom = vec![
        0x70, 0x00, // Syscall + Reserved
        0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001
    ];
    let mut vm = new_test_vm(rom.clone(), vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    // Grant GFX capability so arg underflow is checked (capability gate is first)
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Guard-clause style: extract the trap or fail immediately.
    let LogicalFrameEndingReason::Trap(trap) = &report.reason else {
        panic!("Expected Trap, got {:?}", report.reason);
    };
    assert_eq!(trap.code, TRAP_STACK_UNDERFLOW);
    assert_eq!(trap.opcode, OpCode::Syscall as u16);
    assert!(trap.message.contains("underflow"));
    assert_eq!(trap.pc, 0);
}
|
|
|
|
#[test]
fn test_syscall_missing_capability_trap() {
    // Program: directly call GfxClear (0x1001). We check caps before args, so no underflow.
    let rom = vec![
        0x70, 0x00, // Syscall + Reserved
        0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001 (LE)
    ];

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // Remove all capabilities
    vm.set_capabilities(0);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // The capability gate surfaces as TRAP_INVALID_SYSCALL with a
    // "Missing capability" message, reported at the syscall's pc.
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_INVALID_SYSCALL);
            assert_eq!(trap.opcode, OpCode::Syscall as u16);
            assert!(trap.message.contains("Missing capability"));
            assert_eq!(trap.pc, 0);
        }
        other => panic!("Expected Trap, got {:?}", other),
    }
}
|
|
|
|
#[test]
fn test_syscall_with_capability_success() {
    // Program: push arg 0; call GfxClear (0x1001)
    let rom: Vec<u8> = [
        &(OpCode::PushI32 as u16).to_le_bytes()[..],
        &0i32.to_le_bytes()[..],
        &(OpCode::Syscall as u16).to_le_bytes()[..],
        &0x1001u32.to_le_bytes()[..], // GfxClear
    ]
    .concat();

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // Grant only GFX capability
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Any non-trap outcome is considered success here
    if let LogicalFrameEndingReason::Trap(trap) = report.reason {
        panic!("Unexpected trap: {:?}", trap);
    }
}
|
|
|
|
#[test]
fn test_syscall_results_count_mismatch_panic() {
    // GfxClear565 (0x1010) expects 0 results
    let rom = vec![
        0x17, 0x00, // PushI32
        0x00, 0x00, 0x00, 0x00, // value 0
        0x70, 0x00, // Syscall + Reserved
        0x10, 0x10, 0x00, 0x00, // Syscall ID 0x1010
    ];

    // Native handler that violates the syscall's declared ABI by pushing a
    // result where none is expected.
    struct BadNative;
    impl NativeInterface for BadNative {
        fn syscall(
            &mut self,
            _id: u32,
            _args: &[Value],
            ret: &mut HostReturn,
            _ctx: &mut HostContext,
        ) -> Result<(), VmFault> {
            // Wrong: GfxClear565 is void but we push something
            ret.push_int(42);
            Ok(())
        }
    }

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // Grant GFX capability so results mismatch path is exercised
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
    let mut native = BadNative;
    let mut ctx = HostContext::new(None);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Panic(msg) => assert!(msg.contains("results mismatch")),
        _ => panic!("Expected Panic, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_host_return_bounded_overflow_trap() {
    // push_bounded must reject 65536 (out of its bounded range) with an OOB trap.
    let mut stack = Vec::new();
    let mut ret = HostReturn::new(&mut stack);
    let fault = ret.push_bounded(65536).expect_err("push_bounded should reject 65536");
    match fault {
        VmFault::Trap(code, _) => assert_eq!(code, TRAP_OOB),
        _ => panic!("Expected Trap"),
    }
}
|
|
|
|
#[test]
fn test_loader_hardening_invalid_magic() {
    // A blob with the wrong magic bytes must be rejected up front.
    let mut vm = VirtualMachine::default();
    assert_eq!(vm.initialize(vec![0, 0, 0, 0], ""), Err(VmInitError::InvalidFormat));
    // VM should remain empty
    assert!(vm.program.rom.is_empty());
}
|
|
|
|
#[test]
fn test_loader_hardening_unsupported_version() {
    // Valid magic, but a format version the loader does not support.
    let mut vm = VirtualMachine::default();

    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");
    header[4..6].copy_from_slice(&1u16.to_le_bytes()); // version 1 (unsupported)

    assert_eq!(vm.initialize(header, ""), Err(VmInitError::UnsupportedFormat));
}
|
|
|
|
#[test]
fn test_loader_hardening_malformed_pbs_v0() {
    // Header claims one section but the payload ends immediately; the loader
    // must surface the parser's UnexpectedEof wrapped in ImageLoadFailed.
    let mut vm = VirtualMachine::default();
    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");
    header[8..12].copy_from_slice(&1u32.to_le_bytes()); // 1 section claimed but none provided

    let res = vm.initialize(header, "");
    match res {
        Err(VmInitError::ImageLoadFailed(prometeu_bytecode::LoadError::UnexpectedEof)) => {}
        // Fix: the failure message previously named the stale variant
        // "PbsV0LoadFailed"; keep it in sync with the matched pattern.
        _ => panic!("Expected ImageLoadFailed(UnexpectedEof), got {:?}", res),
    }
}
|
|
|
|
#[test]
fn test_loader_hardening_entrypoint_not_found() {
    // A numeric entrypoint pointing outside the (empty) ROM must be rejected,
    // and the rejection must leave the VM untouched.
    let mut vm = VirtualMachine::default();
    // Valid empty PBS v0 module
    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");

    // Try to initialize with numeric entrypoint 10 (out of bounds for empty ROM)
    assert_eq!(vm.initialize(header, "10"), Err(VmInitError::EntrypointNotFound));

    // VM state should not be updated
    assert_eq!(vm.pc, 0);
    assert!(vm.program.rom.is_empty());
}
|
|
|
|
#[test]
fn test_loader_hardening_successful_init() {
    // A minimal valid PBS v0 header must load cleanly and reset execution state.
    let mut vm = VirtualMachine::default();
    vm.pc = 123; // Pollution

    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");

    assert!(vm.initialize(header, "").is_ok());
    assert_eq!(vm.pc, 0);
    assert_eq!(vm.program.rom.len(), 0);
    assert_eq!(vm.cycles, 0);
}
|
|
|
|
#[test]
fn test_calling_convention_add() {
    // End-to-end check of the call ABI: the caller pushes two args, the callee
    // reads them back as locals 0 and 1, adds, and returns one slot.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // F0 (entry):
    // PUSH_I32 10
    // PUSH_I32 20
    // CALL 1 (add)
    // HALT
    // F1 (add):
    // GET_LOCAL 0 (a)
    // GET_LOCAL 1 (b)
    // ADD
    // RET (1 slot)

    let mut rom = Vec::new();
    // F0
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // F1
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 2,  // a and b, taken from the caller's stack
            return_slots: 1, // the sum
            ..Default::default()
        },
    ]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.last().unwrap(), &Value::Int32(30));
}
|
|
|
|
#[test]
fn test_calling_convention_multi_slot_return() {
    // A callee declaring return_slots=2 must leave both values on the caller's
    // stack, in push order.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // F0:
    // CALL 1
    // HALT
    // F1:
    // PUSH_I32 100
    // PUSH_I32 200
    // RET (2 slots)

    let mut rom = Vec::new();
    // F0
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // F1
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&100i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&200i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 0,
            return_slots: 2,
            ..Default::default()
        },
    ]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // Stack should be [100, 200]
    assert_eq!(vm.operand_stack.len(), 2);
    assert_eq!(vm.operand_stack[0], Value::Int32(100));
    assert_eq!(vm.operand_stack[1], Value::Int32(200));
}
|
|
|
|
#[test]
fn test_calling_convention_void_call() {
    // A call consuming one param and returning zero slots must leave the
    // caller's stack empty afterwards.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // F0:
    // PUSH_I32 42
    // CALL 1
    // HALT
    // F1 (1 param, 0 returns):
    // RET (0 slots) — the 42 is consumed as the callee's parameter slot
    // (NOTE: the listing previously claimed a POP here; F1's body is just RET)

    let mut rom = Vec::new();
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 1,
            return_slots: 0,
            ..Default::default()
        },
    ]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.len(), 0);
}
|
|
|
|
#[test]
fn test_trap_invalid_func() {
    // Calling a function index with no FunctionMeta must raise TRAP_INVALID_FUNC.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // CALL 99 (invalid)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&99u32.to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Guard-clause style: extract the trap or fail immediately.
    let LogicalFrameEndingReason::Trap(trap) = &report.reason else {
        panic!("Expected Trap(TRAP_INVALID_FUNC), got {:?}", report.reason);
    };
    assert_eq!(trap.code, TRAP_INVALID_FUNC);
    assert_eq!(trap.opcode, OpCode::Call as u16);
}
|
|
|
|
#[test]
fn test_trap_bad_ret_slots() {
    // A callee whose stack height at RET disagrees with its declared
    // return_slots must raise TRAP_BAD_RET_SLOTS.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // F0: CALL 1; HALT
    // F1: PUSH_I32 42; RET (expected 0 slots)

    let mut rom = Vec::new();
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 0,
            return_slots: 0, // ERROR: function pushes 42 but returns 0
            ..Default::default()
        },
    ]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_BAD_RET_SLOTS);
            assert_eq!(trap.opcode, OpCode::Ret as u16);
            assert!(trap.message.contains("Incorrect stack height"));
        }
        _ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS), got {:?}", report.reason),
    }
}
|
|
#[test]
fn test_locals_round_trip() {
    // A value stored in a local slot must survive unrelated stack traffic and
    // be retrievable afterwards.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_I32 42
    // SET_LOCAL 0
    // PUSH_I32 0 (garbage)
    // POP
    // GET_LOCAL 0
    // RET (1 slot)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        // Fix: cover the whole assembled ROM (28 bytes). The previous
        // hard-coded `code_len: 20` truncated the declared body before
        // GET_LOCAL/RET; deriving it from rom.len() matches sibling tests.
        code_len: rom.len() as u32,
        local_slots: 1,
        return_slots: 1,
        ..Default::default()
    }]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::EndOfRom);
    // RET pops return values and pushes them back on the caller stack (which is the sentinel frame's stack here).
    assert_eq!(vm.operand_stack, vec![Value::Int32(42)]);
}
|
|
|
|
#[test]
fn test_locals_per_call_isolation() {
    // Each invocation must get freshly zero-initialized locals; a value stored
    // by one call must not leak into the next call's local slots.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Function 0 (entry):
    // CALL 1
    // POP
    // CALL 1
    // HALT
    // Function 1:
    // GET_LOCAL 0 (should be Null initially)
    // PUSH_I32 42
    // SET_LOCAL 0
    // RET (1 slot: the initial Null)

    let mut rom = Vec::new();
    // F0
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // F1
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            local_slots: 1,
            return_slots: 1,
            ..Default::default()
        },
    ]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);

    // The last value on stack is the return of the second CALL 1,
    // which should be Value::Null because locals are zero-initialized on each call.
    assert_eq!(vm.operand_stack.last().unwrap(), &Value::Null);
}
|
|
|
|
#[test]
fn test_invalid_local_index_traps() {
    // Reading local slot 1 when only one local (index 0) exists must trap.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Function with 0 params, 1 local.
    // GET_LOCAL 1 (OOB)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 8, // GET_LOCAL (2 + 4 bytes) + HALT (2 bytes)
        local_slots: 1,
        ..Default::default()
    }]);

    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_INVALID_LOCAL);
            assert_eq!(trap.opcode, OpCode::GetLocal as u16);
            assert!(trap.message.contains("out of bounds"));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_nested_if() {
    // Exercises nested conditionals via absolute jump targets. The byte
    // offsets in the comments below are hand-computed and must stay in sync
    // with the emitted instruction sizes (opcode 2B, imm32 4B, bool imm 1B).
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // if (true) {
    //   if (false) {
    //     PUSH 1
    //   } else {
    //     PUSH 2
    //   }
    // } else {
    //   PUSH 3
    // }
    // HALT
    let mut rom = Vec::new();
    // 0: PUSH_BOOL true
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    // 3: JMP_IF_FALSE -> ELSE1 (offset 42)
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&42u32.to_le_bytes());

    // INNER IF:
    // 9: PUSH_BOOL false
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(0);
    // 12: JMP_IF_FALSE -> ELSE2 (offset 30)
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&30u32.to_le_bytes());
    // 18: PUSH_I32 1
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    // 24: JMP -> END (offset 48)
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&48u32.to_le_bytes());

    // ELSE2:
    // 30: PUSH_I32 2
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&2i32.to_le_bytes());
    // 36: JMP -> END (offset 48)
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&48u32.to_le_bytes());

    // ELSE1:
    // 42: PUSH_I32 3
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&3i32.to_le_bytes());

    // END:
    // 48: HALT
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // We need to set up the function meta for absolute jumps to work correctly
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 50,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Outer condition true, inner condition false -> the inner else (2) wins.
    assert_eq!(vm.pop().unwrap(), Value::Int32(2));
}
|
|
|
|
#[test]
fn test_if_with_empty_branches() {
    // An if/else where both branches are empty must fall through cleanly and
    // leave the operand stack empty (the condition is consumed).
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_BOOL true
    // JMP_IF_FALSE -> ELSE (offset 15)
    // // Empty then
    // JMP -> END (offset 15)
    // ELSE:
    // // Empty else
    // END:
    // HALT
    let mut rom = Vec::new();
    // 0-2: PUSH_BOOL true
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    // 3-8: JMP_IF_FALSE -> 15
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&15u32.to_le_bytes());
    // 9-14: JMP -> 15
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&15u32.to_le_bytes());
    // 15-16: HALT
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 17,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.len(), 0);
}
|
|
|
|
#[test]
fn test_jmp_if_non_boolean_trap() {
    // Conditional jumps require a Bool on top of the stack; an Int32 condition
    // must raise a TRAP_TYPE fault.
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_I32 1
    // JMP_IF_TRUE 9
    // HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
    rom.extend_from_slice(&9u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 14,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_TYPE);
            assert_eq!(trap.opcode, OpCode::JmpIfTrue as u16);
            assert!(trap.message.contains("Expected boolean"));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
|
|
|
|
#[test]
fn test_gc_triggers_only_at_frame_sync() {
    // GC must run only at the FRAME_SYNC safepoint, never mid-instruction.
    use crate::object::ObjectKind;

    // ROM: NOP; FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Nop as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    // Set a very low threshold to trigger GC as soon as we hit FRAME_SYNC
    vm.gc_alloc_threshold = 1;

    // Allocate an unreachable object (no roots referencing it)
    let _orphan = vm.heap.allocate_object(ObjectKind::Bytes, &[1, 2, 3]);
    assert_eq!(vm.heap.len(), 1);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Step 1: NOP — should not run GC
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.heap.len(), 1, "GC must not run except at safepoints");

    // Step 2: FRAME_SYNC — GC should run and reclaim the unreachable object
    match vm.step(&mut native, &mut ctx) {
        Err(LogicalFrameEndingReason::FrameSync) => {}
        other => panic!("Expected FrameSync, got {:?}", other),
    }
    assert_eq!(vm.heap.len(), 0, "Unreachable object must be reclaimed at FRAME_SYNC");
}
|
|
|
|
#[test]
fn test_gc_keeps_roots_and_collects_unreachable_at_frame_sync() {
    // At a safepoint, objects reachable from the operand stack must survive
    // while unreferenced objects are reclaimed.
    use crate::object::ObjectKind;

    // ROM: FRAME_SYNC; FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    vm.gc_alloc_threshold = 1;

    // Allocate two objects; make one a root by placing it on the operand stack
    let rooted = vm.heap.allocate_object(ObjectKind::Bytes, &[9, 9]);
    let unreachable = vm.heap.allocate_object(ObjectKind::Bytes, &[8, 8, 8]);
    assert_eq!(vm.heap.len(), 2);
    vm.operand_stack.push(Value::HeapRef(rooted));

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Execute FRAME_SYNC: should trigger GC
    match vm.step(&mut native, &mut ctx) {
        Err(LogicalFrameEndingReason::FrameSync) => {}
        other => panic!("Expected FrameSync, got {:?}", other),
    }

    // Rooted must survive; unreachable must be collected
    assert_eq!(vm.heap.len(), 1);
    assert!(vm.heap.is_valid(rooted));
    assert!(!vm.heap.is_valid(unreachable));
}
|
|
|
|
#[test]
fn test_gc_simple_allocation_collection_cycle() {
    // Two allocate-then-collect rounds, one per FRAME_SYNC safepoint, to show
    // the cycle is deterministic and repeatable.
    use crate::object::ObjectKind;

    // ROM: FRAME_SYNC; FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    // Make GC trigger on any allocation delta
    vm.gc_alloc_threshold = 1;

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Same payloads as before: &[1] on the first round, &[2] on the second.
    for round in 1..=2u8 {
        let _handle = vm.heap.allocate_object(ObjectKind::Bytes, &[round]);
        assert_eq!(vm.heap.len(), 1);
        assert!(
            matches!(vm.step(&mut native, &mut ctx), Err(LogicalFrameEndingReason::FrameSync)),
            "Expected FrameSync on round {}",
            round
        );
        assert_eq!(vm.heap.len(), 0);
    }
}
|
|
|
|
#[test]
fn test_gc_many_short_lived_objects_stress() {
    // Many unreferenced allocations must all be reclaimed by one safepoint.
    use crate::object::ObjectKind;

    // ROM: FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    // Deterministic: trigger collection when any growth since last sweep occurs
    vm.gc_alloc_threshold = 1;

    // Allocate many small, unreferenced objects
    let count = 2048usize; // stress but still quick
    for byte in (0..count).map(|i| (i & 0xFF) as u8) {
        let _ = vm.heap.allocate_object(ObjectKind::Bytes, &[byte]);
    }
    assert_eq!(vm.heap.len(), count);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Single FRAME_SYNC should reclaim all since there are no roots
    assert!(
        matches!(vm.step(&mut native, &mut ctx), Err(LogicalFrameEndingReason::FrameSync)),
        "Expected FrameSync"
    );

    assert_eq!(vm.heap.len(), 0, "All short-lived objects must be reclaimed deterministically");
}
|
|
|
|
#[test]
fn test_make_closure_zero_captures() {
    // MAKE_CLOSURE with zero captures must still allocate a closure object
    // with the right fn_id and an empty (but present) environment.
    use prometeu_bytecode::{FunctionMeta, Value};

    // ROM: MAKE_CLOSURE fn_id=7, cap=0; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    rom.extend_from_slice(&7u32.to_le_bytes()); // fn_id
    rom.extend_from_slice(&0u32.to_le_bytes()); // capture_count
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // step MAKE_CLOSURE
    vm.step(&mut native, &mut ctx).unwrap();
    // step HALT
    vm.step(&mut native, &mut ctx).unwrap();

    assert!(vm.halted);
    assert_eq!(vm.operand_stack.len(), 1);
    // The closure lives on the heap; the stack holds only a reference to it.
    let top = vm.peek().unwrap().clone();
    let href = match top { Value::HeapRef(h) => h, _ => panic!("Expected HeapRef on stack") };
    assert!(vm.heap.is_valid(href));
    assert_eq!(vm.heap.closure_fn_id(href), Some(7));
    let env = vm.heap.closure_env_slice(href).expect("env slice");
    assert_eq!(env.len(), 0);
}
|
|
|
|
#[test]
fn test_make_closure_multiple_captures_and_order() {
    use prometeu_bytecode::{FunctionMeta, Value};

    // Program layout:
    //   PUSH_I32 1; PUSH_I32 2; PUSH_I32 3   // operand stack becomes [1,2,3]
    //   MAKE_CLOSURE fn_id=9 captures=3      // pops 3,2,1 -> env = [1,2,3]
    //   HALT
    let mut code = Vec::new();
    for n in 1i32..=3 {
        code.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
        code.extend_from_slice(&n.to_le_bytes());
    }
    code.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    code.extend_from_slice(&9u32.to_le_bytes()); // fn_id
    code.extend_from_slice(&3u32.to_le_bytes()); // capture_count
    code.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(code.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: code.len() as u32,
        ..Default::default()
    }]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Drive the program to completion.
    while !vm.halted {
        vm.step(&mut native, &mut ctx).unwrap();
    }

    // After HALT, only the closure reference may remain on the stack.
    assert_eq!(vm.operand_stack.len(), 1);
    let href = match vm.pop().unwrap() {
        Value::HeapRef(h) => h,
        _ => panic!("Expected HeapRef"),
    };
    assert_eq!(vm.heap.closure_fn_id(href), Some(9));
    let env = vm.heap.closure_env_slice(href).expect("env slice");
    assert_eq!(env.len(), 3);
    // Capture order is preserved: bottom-of-stack value occupies env slot 0.
    for (slot, expected) in env.iter().zip([1i32, 2, 3]) {
        assert_eq!(*slot, Value::Int32(expected));
    }
}
|
|
|
|
#[test]
fn test_call_closure_returns_constant() {
    use prometeu_bytecode::{FunctionMeta, Value};

    // Tiny assembler helpers for the ROM byte stream.
    fn emit_op(buf: &mut Vec<u8>, opcode: OpCode) {
        buf.extend_from_slice(&(opcode as u16).to_le_bytes());
    }
    fn emit_u32(buf: &mut Vec<u8>, imm: u32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }
    fn emit_i32(buf: &mut Vec<u8>, imm: i32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }

    // F0 (entry): MAKE_CLOSURE fn=1, cap=0; CALL_CLOSURE argc=0; HALT
    // F1 (callee): PUSH_I32 7; RET
    let mut rom = Vec::new();
    emit_op(&mut rom, OpCode::MakeClosure);
    emit_u32(&mut rom, 1); // fn_id
    emit_u32(&mut rom, 0); // capture_count
    emit_op(&mut rom, OpCode::CallClosure);
    emit_u32(&mut rom, 0); // argc = 0 user args
    emit_op(&mut rom, OpCode::Halt);
    let f0_len = rom.len() as u32; // F0 starts at offset 0

    let f1_start = rom.len() as u32;
    emit_op(&mut rom, OpCode::PushI32);
    emit_i32(&mut rom, 7);
    emit_op(&mut rom, OpCode::Ret);
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta { code_offset: 0, code_len: f0_len, ..Default::default() },
        FunctionMeta { code_offset: f1_start, code_len: f1_len, param_slots: 1, return_slots: 1, ..Default::default() },
    ]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Program must run to HALT with the callee's constant as the sole stack value.
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.len(), 1);
    assert_eq!(vm.operand_stack[0], Value::Int32(7));
}
|
|
|
|
#[test]
fn test_call_closure_with_captures_ignored() {
    use prometeu_bytecode::{FunctionMeta, Value};

    // Tiny assembler helpers for the ROM byte stream.
    fn emit_op(buf: &mut Vec<u8>, opcode: OpCode) {
        buf.extend_from_slice(&(opcode as u16).to_le_bytes());
    }
    fn emit_u32(buf: &mut Vec<u8>, imm: u32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }
    fn emit_i32(buf: &mut Vec<u8>, imm: i32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }

    // F0: PUSH_I32 123; MAKE_CLOSURE fn=1 cap=1; CALL_CLOSURE 0; HALT
    // F1: PUSH_I32 42; RET
    // The captured 123 must not influence the callee's result.
    let mut rom = Vec::new();
    emit_op(&mut rom, OpCode::PushI32);
    emit_i32(&mut rom, 123);
    emit_op(&mut rom, OpCode::MakeClosure);
    emit_u32(&mut rom, 1); // fn_id
    emit_u32(&mut rom, 1); // capture_count
    emit_op(&mut rom, OpCode::CallClosure);
    emit_u32(&mut rom, 0); // argc = 0
    emit_op(&mut rom, OpCode::Halt);
    let f0_len = rom.len() as u32; // F0 starts at offset 0

    let f1_start = rom.len() as u32;
    emit_op(&mut rom, OpCode::PushI32);
    emit_i32(&mut rom, 42);
    emit_op(&mut rom, OpCode::Ret);
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta { code_offset: 0, code_len: f0_len, ..Default::default() },
        FunctionMeta { code_offset: f1_start, code_len: f1_len, param_slots: 1, return_slots: 1, ..Default::default() },
    ]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();

    // Only the callee's return value survives; the capture stays in the env.
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack, vec![Value::Int32(42)]);
}
|
|
|
|
#[test]
fn test_call_closure_on_non_closure_traps() {
    use prometeu_bytecode::FunctionMeta;

    // F0: PUSH_I32 1; CALL_CLOSURE 0; HALT
    // Invoking CALL_CLOSURE on a plain Int32 must raise TRAP_TYPE before HALT.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::CallClosure as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    // HALT sits after the faulting opcode so a missed trap would be visible.
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(10, &mut native, &mut ctx).unwrap();

    match &report.reason {
        LogicalFrameEndingReason::Trap(info) => {
            // The trap must be a type error attributed to CALL_CLOSURE itself.
            assert_eq!(info.code, TRAP_TYPE);
            assert_eq!(info.opcode, OpCode::CallClosure as u16);
        }
        other => panic!("Expected Trap(TYPE) from CALL_CLOSURE on non-closure, got {:?}", other),
    }
}
|
|
|
|
#[test]
fn test_nested_call_closure() {
    use prometeu_bytecode::{FunctionMeta, Value};

    // Tiny assembler helpers for the ROM byte stream.
    fn emit_op(buf: &mut Vec<u8>, opcode: OpCode) {
        buf.extend_from_slice(&(opcode as u16).to_le_bytes());
    }
    fn emit_u32(buf: &mut Vec<u8>, imm: u32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }
    fn emit_i32(buf: &mut Vec<u8>, imm: i32) {
        buf.extend_from_slice(&imm.to_le_bytes());
    }

    // F0: MAKE_CLOSURE fn=1 cap=0; CALL_CLOSURE 0; CALL_CLOSURE 0; HALT
    // F1: MAKE_CLOSURE fn=2 cap=0; RET   (returns a fresh closure)
    // F2: PUSH_I32 55; RET               (returns a constant)
    let mut rom = Vec::new();

    // --- F0 (entry) ---
    emit_op(&mut rom, OpCode::MakeClosure);
    emit_u32(&mut rom, 1); // fn_id = 1
    emit_u32(&mut rom, 0); // cap = 0
    emit_op(&mut rom, OpCode::CallClosure);
    emit_u32(&mut rom, 0); // argc=0 -> F1 leaves a closure on the stack
    emit_op(&mut rom, OpCode::CallClosure);
    emit_u32(&mut rom, 0); // argc=0 -> invoke the closure F1 returned (F2)
    emit_op(&mut rom, OpCode::Halt);
    let f0_len = rom.len() as u32; // F0 starts at offset 0

    // --- F1: closure factory ---
    let f1_start = rom.len() as u32;
    emit_op(&mut rom, OpCode::MakeClosure);
    emit_u32(&mut rom, 2); // fn_id = 2
    emit_u32(&mut rom, 0); // cap = 0
    emit_op(&mut rom, OpCode::Ret); // return the HeapRef left on the stack
    let f1_len = rom.len() as u32 - f1_start;

    // --- F2: constant producer ---
    let f2_start = rom.len() as u32;
    emit_op(&mut rom, OpCode::PushI32);
    emit_i32(&mut rom, 55);
    emit_op(&mut rom, OpCode::Ret);
    let f2_len = rom.len() as u32 - f2_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta { code_offset: 0, code_len: f0_len, ..Default::default() },
        FunctionMeta { code_offset: f1_start, code_len: f1_len, param_slots: 1, return_slots: 1, ..Default::default() },
        FunctionMeta { code_offset: f2_start, code_len: f2_len, param_slots: 1, return_slots: 1, ..Default::default() },
    ]);

    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(200, &mut native, &mut ctx).unwrap();

    // The chained calls must bottom out in F2's constant.
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack, vec![Value::Int32(55)]);
}
|
|
}
|