// NOTE(review): extraction-tool residue repaired into a comment so the file
// stays valid Rust — original metadata: 4604 lines, 180 KiB, Rust.

mod gc;
mod loader;
mod runtime;
mod stack;
use crate::call_frame::CallFrame;
use crate::heap::{CoroutineState, Heap};
use crate::lookup_intrinsic_by_id;
use crate::object::ObjectKind;
use crate::roots::{RootVisitor, visit_value_for_roots};
use crate::scheduler::Scheduler;
use crate::verifier::Verifier;
use crate::vm_init_error::{LoaderPatchError, VmInitError};
use crate::{HostContext, NativeInterface};
use prometeu_bytecode::HeapRef;
use prometeu_bytecode::ProgramImage;
use prometeu_bytecode::Value;
use prometeu_bytecode::decode_next;
use prometeu_bytecode::isa::core::CoreOpCode as OpCode;
use prometeu_bytecode::model::BytecodeModule;
use prometeu_bytecode::{
TRAP_BAD_RET_SLOTS, TRAP_DIV_ZERO, TRAP_EXPLICIT, TRAP_INVALID_FUNC, TRAP_INVALID_INTRINSIC,
TRAP_INVALID_SYSCALL, TRAP_OOB, TRAP_STACK_UNDERFLOW, TRAP_TYPE, TrapInfo,
};
use prometeu_hal::syscalls::caps::NONE;
use prometeu_hal::vm_fault::VmFault;
/// Reason why the Virtual Machine stopped execution during a specific run.
///
/// This allows the system to decide if it should continue execution in the next tick
/// or if the frame is finalized.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LogicalFrameEndingReason {
    /// Execution reached a `FRAME_SYNC` instruction, marking the end of the logical frame.
    FrameSync,
    /// The cycle budget for the current host tick was exhausted before reaching `FRAME_SYNC`.
    BudgetExhausted,
    /// A `HALT` instruction was executed, terminating the program.
    Halted,
    /// The Program Counter (PC) reached the end of the available bytecode.
    EndOfRom,
    /// Execution hit a registered breakpoint.
    Breakpoint,
    /// A runtime trap occurred (e.g., out-of-bounds access); the payload carries
    /// the trap code, opcode, message, and PC.
    Trap(TrapInfo),
    /// A fatal error occurred that cannot be recovered (e.g., stack underflow);
    /// the payload is a human-readable description.
    Panic(String),
}
/// Error produced by per-opcode helper closures (e.g., the closures passed to
/// `binary_op` for ADD/SUB/DIV/...); the caller converts it into a
/// [`LogicalFrameEndingReason::Trap`] with the opcode and PC filled in.
pub(crate) enum OpError {
    /// A trap request: trap code (e.g., `TRAP_TYPE`, `TRAP_DIV_ZERO`) plus a
    /// human-readable message.
    Trap(u32, String),
}
impl From<TrapInfo> for LogicalFrameEndingReason {
fn from(info: TrapInfo) -> Self {
LogicalFrameEndingReason::Trap(info)
}
}
/// A report detailing the results of an execution slice (run_budget).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BudgetReport {
    /// Total virtual cycles consumed during this run (slice-local, not the
    /// VM-lifetime counter).
    pub cycles_used: u64,
    /// Number of VM instructions executed during this slice.
    pub steps_executed: u32,
    /// The reason why this execution slice ended.
    pub reason: LogicalFrameEndingReason,
}
/// Stack-based bytecode Virtual Machine with cooperative coroutine scheduling.
///
/// Holds all execution state: program counter, operand/call stacks, globals,
/// the loaded program image, the heap, cycle accounting, debugger breakpoints,
/// GC bookkeeping, capability flags, and the cooperative scheduler state.
pub struct VirtualMachine {
    /// Program Counter (PC): The absolute byte offset in ROM for the next instruction.
    pc: usize,
    /// Operand Stack: The primary workspace for all mathematical and logical operations.
    /// Also hosts function parameters and zero-initialized local slots (see `Call`).
    operand_stack: Vec<Value>,
    /// Call Stack: Manages function call context (return addresses, frame limits).
    call_stack: Vec<CallFrame>,
    /// Global Variable Store: Variables that persist for the lifetime of the program.
    /// Grown on demand by `GET_GLOBAL`/`SET_GLOBAL`.
    globals: Vec<Value>,
    /// The loaded executable (Bytecode + Constant Pool), that is the ROM translated.
    program: ProgramImage,
    /// Heap Memory: Dynamic allocation pool (closures, coroutines, ...).
    heap: Heap,
    /// Total virtual cycles consumed since the VM started.
    cycles: u64,
    /// Stop flag: true if a `HALT` opcode was encountered.
    halted: bool,
    /// Set of ROM addresses used for software breakpoints in the debugger.
    breakpoints: std::collections::HashSet<usize>,
    /// GC: number of newly allocated live objects threshold to trigger a collection at safepoint.
    /// The GC only runs at safepoints (e.g., FRAME_SYNC). 0 disables automatic GC.
    gc_alloc_threshold: usize,
    /// GC: snapshot of live objects count after the last collection (or VM init).
    last_gc_live_count: usize,
    /// Capability flags granted to the currently running program/cart.
    /// Syscalls are capability-gated using `prometeu_hal::syscalls::SyscallMeta::caps`.
    capabilities: prometeu_hal::syscalls::CapFlags,
    /// Cooperative scheduler: set to true when `YIELD` opcode is executed.
    /// The runtime/scheduler should only act on this at safepoints (FRAME_SYNC).
    yield_requested: bool,
    /// Absolute wake tick requested by the currently running coroutine (when it executes `SLEEP`).
    ///
    /// Canonical rule (authoritative):
    /// - `SLEEP N` suspends the coroutine for exactly N full scheduler ticks AFTER the current
    ///   `FRAME_SYNC` completes. If `SLEEP` is executed during tick `T`, the coroutine must resume
    ///   in the frame whose end-of-frame tick will be `T + N + 1`.
    /// - Implementation detail: we compute `wake_tick = current_tick + duration + 1` at the time
    ///   `SLEEP` executes. The scheduler wakes sleeping coroutines when `current_tick >= wake_tick`.
    ///
    /// This definition is deterministic and eliminates off-by-one ambiguity.
    sleep_requested_until: Option<u64>,
    /// Logical tick counter advanced at each FRAME_SYNC boundary.
    current_tick: u64,
    /// Cooperative scheduler instance managing ready/sleeping queues.
    scheduler: Scheduler,
    /// Handle to the currently running coroutine (owns the active VM context).
    /// `None` means no coroutine can run this frame (e.g., all are sleeping).
    current_coro: Option<HeapRef>,
}
impl Default for VirtualMachine {
    /// Builds a VM with an empty ROM and an empty constant pool.
    fn default() -> Self {
        Self::new(Vec::new(), Vec::new())
    }
}
impl VirtualMachine {
/// Returns the current program counter (absolute byte offset into ROM).
pub fn pc(&self) -> usize {
    self.pc
}
/// Returns true if there are no active call frames, i.e. no guest function
/// call is currently in progress.
pub fn call_stack_is_empty(&self) -> bool {
    self.call_stack.is_empty()
}
/// Returns up to `n` values from the top of the operand stack (top-first order).
///
/// Non-destructive: the stack is left untouched and the returned values are
/// clones. If the stack holds fewer than `n` values, all of them are returned.
pub fn operand_stack_top(&self, n: usize) -> Vec<Value> {
    // Walk downwards from the top of the stack and keep at most `n` entries.
    self.operand_stack.iter().rev().take(n).cloned().collect()
}
/// Returns true if the VM has executed a HALT and is not currently running.
pub fn is_halted(&self) -> bool {
    self.halted
}
/// Adds a software breakpoint at the given PC.
///
/// Idempotent: inserting an already-registered address is a no-op.
pub fn insert_breakpoint(&mut self, pc: usize) {
    let _ = self.breakpoints.insert(pc);
}
/// Removes a software breakpoint at the given PC, if present.
///
/// Removing an address that is not registered is a no-op.
pub fn remove_breakpoint(&mut self, pc: usize) {
    let _ = self.breakpoints.remove(&pc);
}
/// Returns the list of currently configured breakpoints, sorted in ascending
/// PC order.
///
/// The underlying storage is a `HashSet`, whose iteration order is
/// unspecified and may differ between runs; sorting makes the returned list
/// deterministic, which is what debugger front-ends and tests expect.
pub fn breakpoints_list(&self) -> Vec<usize> {
    let mut list: Vec<usize> = self.breakpoints.iter().copied().collect();
    list.sort_unstable();
    list
}
// Test-only helpers for internal unit tests within this crate.
/// Pushes `v` directly onto the operand stack. Compiled only under
/// `cfg(test)`; lets unit tests set up stack state without executing bytecode.
#[cfg(test)]
pub(crate) fn push_operand_for_test(&mut self, v: Value) {
    self.operand_stack.push(v);
}
/// Creates a new VM instance with the provided bytecode and constants.
///
/// All runtime state (stacks, globals, heap, scheduler) starts empty, the GC
/// threshold defaults to 1024 live allocations, no capabilities are granted,
/// and no coroutine is running.
pub fn new(rom: Vec<u8>, constant_pool: Vec<Value>) -> Self {
    // Assemble the program image from the raw ROM and constants; the
    // remaining image fields start out empty/absent.
    let program = ProgramImage::new(
        rom,
        constant_pool,
        Vec::new(),
        None,
        std::collections::HashMap::new(),
    );
    Self {
        pc: 0,
        operand_stack: Vec::new(),
        call_stack: Vec::new(),
        globals: Vec::new(),
        program,
        heap: Heap::new(),
        cycles: 0,
        halted: false,
        breakpoints: std::collections::HashSet::new(),
        // Conservative default; tests may override.
        gc_alloc_threshold: 1024,
        last_gc_live_count: 0,
        capabilities: NONE,
        yield_requested: false,
        sleep_requested_until: None,
        current_tick: 0,
        scheduler: Scheduler::new(),
        current_coro: None,
    }
}
/// Executes a single instruction at the current Program Counter (PC).
///
/// This follows the classic CPU cycle:
/// 1. Fetch: Read the opcode from memory.
/// 2. Decode: Identify what operation to perform.
/// 3. Execute: Perform the operation, updating stacks, memory, or calling peripherals.
pub fn step(
&mut self,
native: &mut dyn NativeInterface,
ctx: &mut HostContext,
) -> Result<(), LogicalFrameEndingReason> {
// If there is no currently running coroutine (e.g., all are sleeping),
// we cannot execute any instruction this frame. End the frame immediately
// with a safepoint to advance tick and potentially wake sleepers.
if self.current_coro.is_none() {
self.cycles += OpCode::FrameSync.cycles();
self.handle_safepoint();
return Err(LogicalFrameEndingReason::FrameSync);
}
if self.halted || self.pc >= self.program.rom.len() {
return Ok(());
}
let start_pc = self.pc;
// Fetch & Decode
let instr = prometeu_bytecode::decode_next(self.pc, &self.program.rom)
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let opcode = instr.opcode;
self.pc = instr.next_pc;
// Execute
match opcode {
OpCode::Nop => {}
OpCode::Halt => {
self.halted = true;
}
OpCode::Jmp => {
let target = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let func_start = self
.call_stack
.last()
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
.unwrap_or(0);
self.pc = func_start + target;
}
OpCode::JmpIfFalse => {
let target = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let val =
self.pop_trap(opcode, start_pc as u32, "JMP_IF_FALSE requires one operand")?;
match val {
Value::Boolean(false) => {
let func_start = self
.call_stack
.last()
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
.unwrap_or(0);
self.pc = func_start + target;
}
Value::Boolean(true) => {}
_ => {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!("Expected boolean for JMP_IF_FALSE, got {:?}", val),
start_pc as u32,
));
}
}
}
OpCode::JmpIfTrue => {
let target = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let val =
self.pop_trap(opcode, start_pc as u32, "JMP_IF_TRUE requires one operand")?;
match val {
Value::Boolean(true) => {
let func_start = self
.call_stack
.last()
.map(|f| self.program.functions[f.func_idx].code_offset as usize)
.unwrap_or(0);
self.pc = func_start + target;
}
Value::Boolean(false) => {}
_ => {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!("Expected boolean for JMP_IF_TRUE, got {:?}", val),
start_pc as u32,
));
}
}
}
OpCode::Trap => {
// TRAP is guest-visible ISA, distinct from debugger breakpoints.
self.cycles += OpCode::Trap.cycles();
return Err(self.trap(
TRAP_EXPLICIT,
opcode as u16,
"Program requested trap".into(),
start_pc as u32,
));
}
OpCode::Spawn => {
// Operands: (fn_id, arg_count)
let (fn_id_u32, arg_count_u32) = instr
.imm_u32x2()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let fn_id = fn_id_u32 as usize;
let arg_count = arg_count_u32 as usize;
let callee = self.program.functions.get(fn_id).ok_or_else(|| {
self.trap(
TRAP_INVALID_FUNC,
opcode as u16,
format!("Invalid func_id {} in SPAWN", fn_id),
start_pc as u32,
)
})?;
let param_slots: u16 = callee.param_slots;
let local_slots: u16 = callee.local_slots;
let entry_pc = callee.code_offset as usize;
if arg_count as u16 != param_slots {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!(
"SPAWN arg_count mismatch for func {}: expected {}, got {}",
fn_id, param_slots, arg_count
),
start_pc as u32,
));
}
if self.operand_stack.len() < arg_count {
return Err(self.trap(
TRAP_STACK_UNDERFLOW,
opcode as u16,
format!(
"Stack underflow during SPAWN to func {}: expected at least {} arguments, got {}",
fn_id,
arg_count,
self.operand_stack.len()
),
start_pc as u32,
));
}
// Pop args top-first, then reverse to logical order arg1..argN
let mut args: Vec<Value> = Vec::with_capacity(arg_count);
for _ in 0..arg_count {
args.push(self.pop_trap(
opcode,
start_pc as u32,
format!("SPAWN to func {} argument stack underflow", fn_id),
)?);
}
args.reverse();
// Build operand stack for the new coroutine: params followed by zeroed locals
let mut new_stack: Vec<Value> =
Vec::with_capacity((param_slots + local_slots) as usize);
// Place user args as parameters
for v in args {
new_stack.push(v);
}
// Zero-init locals
for _ in 0..local_slots {
new_stack.push(Value::Null);
}
// Initial frame for the coroutine (sentinel-like return to end-of-rom)
let frames = vec![CallFrame {
return_pc: self.program.rom.len() as u32,
stack_base: 0,
func_idx: fn_id,
}];
let href = self.heap.allocate_coroutine(
entry_pc,
CoroutineState::Ready,
0,
new_stack,
frames,
);
self.scheduler.enqueue_ready(href);
}
OpCode::Yield => {
// Cooperative yield: record intent; actual switching only at FRAME_SYNC.
self.yield_requested = true;
// Do not end the slice here; we continue executing until a safepoint.
}
OpCode::Sleep => {
// Immediate is duration in ticks.
//
// Canonical semantics:
// SLEEP N => suspend for exactly N full scheduler ticks AFTER the current
// FRAME_SYNC completes. If executed at tick T, resume in the frame whose
// end-of-frame tick will be T + N + 1.
//
// Implementation rule:
// wake_tick = current_tick + duration + 1
let duration = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as u64;
let wake = self.current_tick.saturating_add(duration).saturating_add(1);
self.sleep_requested_until = Some(wake);
// End the logical frame right after the instruction completes
// to ensure no further instructions run until at least next tick.
self.cycles += OpCode::FrameSync.cycles();
self.handle_safepoint();
return Err(LogicalFrameEndingReason::FrameSync);
}
OpCode::Hostcall => {
let sysc_index = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
return Err(LogicalFrameEndingReason::Panic(format!(
"HOSTCALL {} reached execution without loader patching",
sysc_index
)));
}
OpCode::MakeClosure => {
// Immediate carries (fn_id, capture_count)
let (fn_id, cap_count) = instr
.imm_u32x2()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
// Pop cap_count values from the operand stack, top-first.
let mut temp: Vec<Value> = Vec::with_capacity(cap_count as usize);
for _ in 0..cap_count {
let v = self.pop_trap(
opcode,
start_pc as u32,
"MAKE_CLOSURE capture stack underflow",
)?;
temp.push(v);
}
// Preserve order so that env[0] corresponds to captured_1 (the bottom-most
// among the popped values): reverse the temp vector.
temp.reverse();
// Allocate closure on heap and push its reference.
let href = self.heap.alloc_closure(fn_id, &temp);
self.push(Value::HeapRef(href));
}
OpCode::CallClosure => {
// Operand carries the number of user-supplied arguments (arg1..argN).
let user_arg_count = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
// Pop the closure reference from the stack (top of stack).
let clos_val = self.pop_trap(
opcode,
start_pc as u32,
"CALL_CLOSURE requires a closure handle on the stack",
)?;
let href = match clos_val {
Value::HeapRef(h) => h,
other => {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!(
"CALL_CLOSURE expects a closure handle at TOS, got {:?}",
other
),
start_pc as u32,
));
}
};
// Validate that the heap object is indeed a Closure.
let header = self.heap.header(href).ok_or_else(|| {
self.trap(
TRAP_OOB,
opcode as u16,
format!("Invalid heap handle in CALL_CLOSURE: {:?}", href),
start_pc as u32,
)
})?;
if header.kind != ObjectKind::Closure {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!("CALL_CLOSURE on non-closure object kind {:?}", header.kind),
start_pc as u32,
));
}
// Pop user arguments from the operand stack (top-first), then fix order.
let mut user_args: Vec<Value> = Vec::with_capacity(user_arg_count);
for _ in 0..user_arg_count {
user_args.push(self.pop_trap(
opcode,
start_pc as u32,
"CALL_CLOSURE argument stack underflow",
)?);
}
user_args.reverse(); // Now in logical order: arg1..argN
// Resolve target function id from the closure payload.
let fn_id = self.heap.closure_fn_id(href).ok_or_else(|| {
LogicalFrameEndingReason::Panic(
"Internal error: malformed closure object (missing fn_id)".into(),
)
})? as usize;
let callee = self.program.functions.get(fn_id).ok_or_else(|| {
self.trap(
TRAP_INVALID_FUNC,
opcode as u16,
format!("Invalid func_id {} from closure", fn_id),
start_pc as u32,
)
})?;
// Copy required fields to drop the immutable borrow before mutating self
let callee_param_slots = callee.param_slots as usize;
let callee_local_slots = callee.local_slots as usize;
let callee_code_offset = callee.code_offset as usize;
// Validate arity: param_slots must equal hidden arg0 + user_arg_count.
let expected_params = 1usize + user_arg_count;
if callee_param_slots != expected_params {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
format!(
"CALL_CLOSURE arg_count mismatch: function expects {} total params (including hidden arg0), got hidden+{}",
callee_param_slots, expected_params
),
start_pc as u32,
));
}
// Prepare the operand stack to match the direct CALL convention:
// push hidden arg0 (closure_ref) followed by arg1..argN.
self.push(Value::HeapRef(href));
for v in user_args.into_iter() {
self.push(v);
}
let stack_base = self
.operand_stack
.len()
.checked_sub(callee_param_slots)
.ok_or_else(|| LogicalFrameEndingReason::Panic("Stack underflow".into()))?;
// Allocate and zero-init local slots
for _ in 0..callee_local_slots {
self.operand_stack.push(Value::Null);
}
self.call_stack.push(CallFrame {
return_pc: self.pc as u32,
stack_base,
func_idx: fn_id,
});
self.pc = callee_code_offset;
}
OpCode::PushConst => {
let idx = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let val = self.program.constant_pool.get(idx).cloned().ok_or_else(|| {
self.trap(
TRAP_OOB,
opcode as u16,
format!("Invalid constant index {}", idx),
start_pc as u32,
)
})?;
self.push(val);
}
OpCode::PushI64 => {
let val = instr
.imm_i64()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
self.push(Value::Int64(val));
}
OpCode::PushI32 => {
let val = instr
.imm_i32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
self.push(Value::Int32(val));
}
OpCode::PushF64 => {
let val = instr
.imm_f64()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
self.push(Value::Float(val));
}
OpCode::PushBool => {
let val = instr
.imm_u8()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
self.push(Value::Boolean(val != 0));
}
OpCode::Pop => {
self.pop_trap(opcode, start_pc as u32, "POP requires one operand")?;
}
OpCode::PopN => {
let n = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
for _ in 0..n {
self.pop_trap(opcode, start_pc as u32, "POPN operand stack underflow")?;
}
}
OpCode::Dup => {
let val =
self.peek_trap(opcode, start_pc as u32, "DUP requires one operand")?.clone();
self.push(val);
}
OpCode::Swap => {
let a = self.pop_trap(opcode, start_pc as u32, "SWAP requires two operands")?;
let b = self.pop_trap(opcode, start_pc as u32, "SWAP requires two operands")?;
self.push(a);
self.push(b);
}
OpCode::Add => self.binary_op(opcode, start_pc as u32, |a, b| match (&a, &b) {
(Value::String(_), _) | (_, Value::String(_)) => {
Ok(Value::String(format!("{}{}", a.to_string(), b.to_string())))
}
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_add(*b))),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_add(*b))),
(Value::Int32(a), Value::Int64(b)) => {
Ok(Value::Int64((*a as i64).wrapping_add(*b)))
}
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_add(*b as i64))),
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a + b)),
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)),
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a + *b as f64)),
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)),
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a + *b as f64)),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for ADD".into())),
})?,
OpCode::Sub => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_sub(b))),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_sub(b))),
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_sub(b))),
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_sub(b as i64))),
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a - b)),
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)),
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a - b as f64)),
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)),
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a - b as f64)),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for SUB".into())),
})?,
OpCode::Mul => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_mul(b))),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_mul(b))),
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_mul(b))),
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_mul(b as i64))),
(Value::Float(a), Value::Float(b)) => Ok(Value::Float(a * b)),
(Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)),
(Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a * b as f64)),
(Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)),
(Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a * b as f64)),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for MUL".into())),
})?,
OpCode::Div => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => {
if b == 0 {
return Err(OpError::Trap(
TRAP_DIV_ZERO,
"Integer division by zero".into(),
));
}
Ok(Value::Int32(a / b))
}
(Value::Int64(a), Value::Int64(b)) => {
if b == 0 {
return Err(OpError::Trap(
TRAP_DIV_ZERO,
"Integer division by zero".into(),
));
}
Ok(Value::Int64(a / b))
}
(Value::Int32(a), Value::Int64(b)) => {
if b == 0 {
return Err(OpError::Trap(
TRAP_DIV_ZERO,
"Integer division by zero".into(),
));
}
Ok(Value::Int64(a as i64 / b))
}
(Value::Int64(a), Value::Int32(b)) => {
if b == 0 {
return Err(OpError::Trap(
TRAP_DIV_ZERO,
"Integer division by zero".into(),
));
}
Ok(Value::Int64(a / b as i64))
}
(Value::Float(a), Value::Float(b)) => {
if b == 0.0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
}
Ok(Value::Float(a / b))
}
(Value::Int32(a), Value::Float(b)) => {
if b == 0.0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
}
Ok(Value::Float(a as f64 / b))
}
(Value::Float(a), Value::Int32(b)) => {
if b == 0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
}
Ok(Value::Float(a / b as f64))
}
(Value::Int64(a), Value::Float(b)) => {
if b == 0.0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
}
Ok(Value::Float(a as f64 / b))
}
(Value::Float(a), Value::Int64(b)) => {
if b == 0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into()));
}
Ok(Value::Float(a / b as f64))
}
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for DIV".into())),
})?,
OpCode::Mod => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => {
if b == 0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into()));
}
Ok(Value::Int32(a % b))
}
(Value::Int64(a), Value::Int64(b)) => {
if b == 0 {
return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into()));
}
Ok(Value::Int64(a % b))
}
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for MOD".into())),
})?,
OpCode::Eq => {
self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a == b)))?
}
OpCode::Neq => {
self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a != b)))?
}
OpCode::Lt => self.binary_op(opcode, start_pc as u32, |a, b| {
a.partial_cmp(&b)
.map(|o| Value::Boolean(o == std::cmp::Ordering::Less))
.ok_or_else(|| OpError::Trap(TRAP_TYPE, "Invalid types for LT".into()))
})?,
OpCode::Gt => self.binary_op(opcode, start_pc as u32, |a, b| {
a.partial_cmp(&b)
.map(|o| Value::Boolean(o == std::cmp::Ordering::Greater))
.ok_or_else(|| OpError::Trap(TRAP_TYPE, "Invalid types for GT".into()))
})?,
OpCode::Lte => self.binary_op(opcode, start_pc as u32, |a, b| {
a.partial_cmp(&b)
.map(|o| Value::Boolean(o != std::cmp::Ordering::Greater))
.ok_or_else(|| OpError::Trap(TRAP_TYPE, "Invalid types for LTE".into()))
})?,
OpCode::Gte => self.binary_op(opcode, start_pc as u32, |a, b| {
a.partial_cmp(&b)
.map(|o| Value::Boolean(o != std::cmp::Ordering::Less))
.ok_or_else(|| OpError::Trap(TRAP_TYPE, "Invalid types for GTE".into()))
})?,
OpCode::And => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a && b)),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for AND".into())),
})?,
OpCode::Or => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a || b)),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for OR".into())),
})?,
OpCode::Not => {
let val = self.pop_trap(opcode, start_pc as u32, "NOT requires one operand")?;
if let Value::Boolean(b) = val {
self.push(Value::Boolean(!b));
} else {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
"Invalid type for NOT".into(),
start_pc as u32,
));
}
}
OpCode::BitAnd => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a & b)),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a & b)),
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) & b)),
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a & (b as i64))),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for BitAnd".into())),
})?,
OpCode::BitOr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a | b)),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a | b)),
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) | b)),
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a | (b as i64))),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for BitOr".into())),
})?,
OpCode::BitXor => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a ^ b)),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a ^ b)),
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) ^ b)),
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a ^ (b as i64))),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for BitXor".into())),
})?,
OpCode::Shl => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shl(b as u32))),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))),
(Value::Int32(a), Value::Int64(b)) => {
Ok(Value::Int64((a as i64).wrapping_shl(b as u32)))
}
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for Shl".into())),
})?,
OpCode::Shr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) {
(Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shr(b as u32))),
(Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))),
(Value::Int32(a), Value::Int64(b)) => {
Ok(Value::Int64((a as i64).wrapping_shr(b as u32)))
}
(Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))),
_ => Err(OpError::Trap(TRAP_TYPE, "Invalid types for Shr".into())),
})?,
OpCode::Neg => {
let val = self.pop_trap(opcode, start_pc as u32, "NEG requires one operand")?;
match val {
Value::Int32(a) => self.push(Value::Int32(a.wrapping_neg())),
Value::Int64(a) => self.push(Value::Int64(a.wrapping_neg())),
Value::Float(a) => self.push(Value::Float(-a)),
_ => {
return Err(self.trap(
TRAP_TYPE,
opcode as u16,
"Invalid type for Neg".into(),
start_pc as u32,
));
}
}
}
OpCode::GetGlobal => {
let idx = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
if idx >= self.globals.len() {
self.globals.resize(idx + 1, Value::Int32(0));
}
let val = self.globals[idx].clone();
self.push(val);
}
OpCode::SetGlobal => {
let idx = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let val =
self.pop_trap(opcode, start_pc as u32, "SET_GLOBAL requires one operand")?;
if idx >= self.globals.len() {
self.globals.resize(idx + 1, Value::Int32(0));
}
self.globals[idx] = val;
}
OpCode::GetLocal => {
let slot = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let frame = self.call_stack.last().ok_or_else(|| {
LogicalFrameEndingReason::Panic("No active call frame".into())
})?;
let func = &self.program.functions[frame.func_idx];
crate::local_addressing::check_local_slot(
func,
slot,
opcode as u16,
start_pc as u32,
)
.map_err(|trap_info| {
self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc)
})?;
let stack_idx = crate::local_addressing::local_index(frame, slot);
let val = self.operand_stack.get(stack_idx).cloned().ok_or_else(|| {
LogicalFrameEndingReason::Panic(
"Internal error: validated local slot not found in stack".into(),
)
})?;
self.push(val);
}
OpCode::SetLocal => {
let slot = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let val =
self.pop_trap(opcode, start_pc as u32, "SET_LOCAL requires one operand")?;
let frame = self.call_stack.last().ok_or_else(|| {
LogicalFrameEndingReason::Panic("No active call frame".into())
})?;
let func = &self.program.functions[frame.func_idx];
crate::local_addressing::check_local_slot(
func,
slot,
opcode as u16,
start_pc as u32,
)
.map_err(|trap_info| {
self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc)
})?;
let stack_idx = crate::local_addressing::local_index(frame, slot);
self.operand_stack[stack_idx] = val;
}
OpCode::Call => {
let func_id = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?
as usize;
let callee = self.program.functions.get(func_id).ok_or_else(|| {
self.trap(
TRAP_INVALID_FUNC,
opcode as u16,
format!("Invalid func_id {}", func_id),
start_pc as u32,
)
})?;
if self.operand_stack.len() < callee.param_slots as usize {
return Err(self.trap(
TRAP_STACK_UNDERFLOW,
opcode as u16,
format!(
"Stack underflow during CALL to func {}: expected at least {} arguments, got {}",
func_id,
callee.param_slots,
self.operand_stack.len()
),
start_pc as u32,
));
}
let stack_base = self.operand_stack.len() - callee.param_slots as usize;
// Allocate and zero-init local_slots
for _ in 0..callee.local_slots {
self.operand_stack.push(Value::Null);
}
self.call_stack.push(CallFrame {
return_pc: self.pc as u32,
stack_base,
func_idx: func_id,
});
self.pc = callee.code_offset as usize;
}
OpCode::Ret => {
let frame = self.call_stack.pop().ok_or_else(|| {
self.trap(
TRAP_STACK_UNDERFLOW,
opcode as u16,
"RET with empty call stack".into(),
start_pc as u32,
)
})?;
let func = &self.program.functions[frame.func_idx];
let return_slots = func.return_slots as usize;
let current_height = self.operand_stack.len();
let expected_height = frame.stack_base
+ func.param_slots as usize
+ func.local_slots as usize
+ return_slots;
if current_height != expected_height {
return Err(self.trap(TRAP_BAD_RET_SLOTS, opcode as u16, format!(
"Incorrect stack height at RET in func {}: expected {} slots (stack_base={} + params={} + locals={} + returns={}), got {}",
frame.func_idx, expected_height, frame.stack_base, func.param_slots, func.local_slots, return_slots, current_height
), start_pc as u32));
}
// Copy return values (preserving order: pop return_slots values, then reverse to push back)
let mut return_vals = Vec::with_capacity(return_slots);
for _ in 0..return_slots {
return_vals.push(self.pop().map_err(LogicalFrameEndingReason::Panic)?);
}
return_vals.reverse();
self.operand_stack.truncate(frame.stack_base);
for val in return_vals {
self.push(val);
}
self.pc = frame.return_pc as usize;
}
OpCode::Syscall => {
let pc_at_syscall = start_pc as u32;
let id = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let syscall = prometeu_hal::syscalls::Syscall::from_u32(id).ok_or_else(|| {
self.trap(
TRAP_INVALID_SYSCALL,
OpCode::Syscall as u16,
format!("Unknown syscall: 0x{:08X}", id),
pc_at_syscall,
)
})?;
// Capability check before any side effects or argument consumption.
let meta = prometeu_hal::syscalls::meta_for(syscall);
if (self.capabilities & meta.caps) != meta.caps {
return Err(self.trap(
TRAP_INVALID_SYSCALL,
OpCode::Syscall as u16,
format!(
"Missing capability for syscall {} (required=0x{:X})",
syscall.name(),
meta.caps
),
pc_at_syscall,
));
}
let args_count = syscall.args_count();
let mut args = Vec::with_capacity(args_count);
for _ in 0..args_count {
let v = self.pop().map_err(|_e| {
self.trap(
TRAP_STACK_UNDERFLOW,
OpCode::Syscall as u16,
"Syscall argument stack underflow".to_string(),
pc_at_syscall,
)
})?;
args.push(v);
}
args.reverse();
let stack_height_before = self.operand_stack.len();
let mut ret = crate::HostReturn::new(&mut self.operand_stack);
native.syscall(id, &args, &mut ret, ctx).map_err(|fault| match fault {
VmFault::Trap(code, msg) => {
self.trap(code, OpCode::Syscall as u16, msg, pc_at_syscall)
}
VmFault::Panic(msg) => LogicalFrameEndingReason::Panic(msg),
VmFault::Unavailable => {
LogicalFrameEndingReason::Panic("Host feature unavailable".into())
}
})?;
let stack_height_after = self.operand_stack.len();
let results_pushed = stack_height_after - stack_height_before;
if results_pushed != syscall.results_count() {
return Err(LogicalFrameEndingReason::Panic(format!(
"Syscall {} (0x{:08X}) results mismatch: expected {}, got {}",
syscall.name(),
id,
syscall.results_count(),
results_pushed
)));
}
}
OpCode::Intrinsic => {
let pc_at_intrinsic = start_pc as u32;
let id = instr
.imm_u32()
.map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?;
let intrinsic = lookup_intrinsic_by_id(id).ok_or_else(|| {
self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!("Unknown intrinsic: 0x{:08X}", id),
pc_at_intrinsic,
)
})?;
let args_count = intrinsic.arg_slots();
let mut args = Vec::with_capacity(args_count);
for _ in 0..args_count {
let value = self.pop().map_err(|_e| {
self.trap(
TRAP_STACK_UNDERFLOW,
OpCode::Intrinsic as u16,
"Intrinsic argument stack underflow".to_string(),
pc_at_intrinsic,
)
})?;
args.push(value);
}
args.reverse();
let results = (intrinsic.implementation)(&args, ctx).map_err(|err| match err {
crate::IntrinsicExecutionError::ArityMismatch { expected, got } => self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} argument mismatch: expected {}, got {}",
intrinsic.owner, intrinsic.name, expected, got
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::TypeMismatch { index, expected } => self.trap(
TRAP_TYPE,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} argument {} type mismatch: expected {:?}",
intrinsic.owner, intrinsic.name, index, expected
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::InvalidBuiltinCarrier {
owner,
name,
carrier,
} => self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} received invalid builtin carrier {} for {}.{}",
intrinsic.owner, intrinsic.name, carrier, owner, name
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::HardwareUnavailable => {
LogicalFrameEndingReason::Panic("Host feature unavailable".into())
}
})?;
intrinsic.validate_result_values(&results).map_err(|err| match err {
crate::IntrinsicExecutionError::ArityMismatch { expected, got } => self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} results mismatch: expected {}, got {}",
intrinsic.owner, intrinsic.name, expected, got
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::TypeMismatch { index, expected } => self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} result {} type mismatch: expected {:?}",
intrinsic.owner, intrinsic.name, index, expected
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::InvalidBuiltinCarrier {
owner,
name,
carrier,
} => self.trap(
TRAP_INVALID_INTRINSIC,
OpCode::Intrinsic as u16,
format!(
"Intrinsic {}.{} produced invalid builtin carrier {} for {}.{}",
intrinsic.owner, intrinsic.name, carrier, owner, name
),
pc_at_intrinsic,
),
crate::IntrinsicExecutionError::HardwareUnavailable => {
LogicalFrameEndingReason::Panic(
"Host feature unavailable while validating intrinsic results".into(),
)
}
})?;
for value in results {
self.push(value);
}
}
OpCode::FrameSync => {
// Marks the logical end of a frame: consume cycles and signal to the driver
self.cycles += OpCode::FrameSync.cycles();
self.handle_safepoint();
return Err(LogicalFrameEndingReason::FrameSync);
}
}
// Apply the instruction cost to the cycle counter
self.cycles += opcode.cycles();
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
/// Builds a `VirtualMachine` over raw `rom` bytes and a constant pool, with a
/// single function covering the whole ROM, and primes the main coroutine.
fn new_test_vm(rom: Vec<u8>, constant_pool: Vec<Value>) -> VirtualMachine {
    let code_len = rom.len() as u32;
    let mut vm = VirtualMachine::new(rom, constant_pool);
    let entry = prometeu_bytecode::FunctionMeta {
        code_offset: 0,
        code_len,
        ..Default::default()
    };
    vm.program.functions = std::sync::Arc::from(vec![entry]);
    // Tests expect a properly initialized main coroutine at function 0.
    vm.prepare_call("0");
    vm
}
use crate::HostReturn;
use prometeu_bytecode::model::{BytecodeModule, SyscallDecl};
use prometeu_bytecode::{
FunctionMeta, TRAP_EXPLICIT, TRAP_INVALID_LOCAL, TRAP_OOB, TRAP_STACK_UNDERFLOW, TRAP_TYPE,
assemble, disassemble,
};
use prometeu_hal::expect_int;
/// Minimal host backend for tests: every syscall succeeds and pushes no results.
struct MockNative;
impl NativeInterface for MockNative {
/// No-op syscall: ignores the id and arguments and writes nothing to the
/// host return area.
fn syscall(
&mut self,
_id: u32,
_args: &[Value],
_ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
Ok(())
}
}
/// Serializes a module containing exactly one function wrapping `code`,
/// together with the given syscall declarations.
fn serialized_single_function_module(code: Vec<u8>, syscalls: Vec<SyscallDecl>) -> Vec<u8> {
    let entry = FunctionMeta {
        code_offset: 0,
        code_len: code.len() as u32,
        ..Default::default()
    };
    let module = BytecodeModule {
        version: 0,
        const_pool: vec![],
        functions: vec![entry],
        code,
        debug_info: None,
        exports: vec![],
        syscalls,
    };
    module.serialize()
}
/// Assembles a module whose single function pushes one zeroed i32 per declared
/// argument slot, issues `HOSTCALL 0`, then halts.
fn serialized_single_hostcall_module(syscall: SyscallDecl) -> Vec<u8> {
    let arg_pushes = "PUSH_I32 0\n".repeat(syscall.arg_slots as usize);
    let source = format!("{}HOSTCALL 0\nHALT", arg_pushes);
    let code = assemble(&source).expect("assemble");
    serialized_single_function_module(code, vec![syscall])
}
#[test]
fn sleep_delays_execution_by_ticks() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Program: SLEEP 2; PUSH_I32 123; FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
    rom.extend_from_slice(&(2u32).to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&123i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    // Frames 1..=3: the coroutine sleeps until wake tick 3. Every slice ends
    // in an immediate FrameSync, advances the tick by one, and must push
    // nothing onto the operand stack before the wake condition is met.
    for expected_tick in 1..=3 {
        let report = vm.run_budget(100, &mut native, &mut ctx).expect("run ok");
        assert!(matches!(report.reason, LogicalFrameEndingReason::FrameSync));
        assert_eq!(vm.operand_stack.len(), 0);
        assert_eq!(vm.current_tick, expected_tick);
    }
    // Frame 4: wake condition met (current_tick >= wake); PUSH_I32 executes
    // before the next FRAME_SYNC.
    let report = vm.run_budget(100, &mut native, &mut ctx).expect("run ok");
    assert!(matches!(report.reason, LogicalFrameEndingReason::FrameSync));
    assert_eq!(vm.peek().unwrap(), &Value::Int32(123));
    // The following slice reaches HALT without errors.
    assert!(vm.run_budget(100, &mut native, &mut ctx).is_ok());
}
#[test]
fn test_deterministic_pc_and_tick_trace_across_runs() {
    // Program (three logical frames):
    //   PUSH_I32 1; YIELD; FRAME_SYNC;
    //   PUSH_I32 2; YIELD; FRAME_SYNC;
    //   PUSH_I32 3; FRAME_SYNC; HALT
    // Two VMs built from the same ROM run in lockstep slices; their per-slice
    // (pc, tick, stack height, reason) traces must be identical.
    fn emit_op(rom: &mut Vec<u8>, op: OpCode) {
        rom.extend_from_slice(&(op as u16).to_le_bytes());
    }
    fn emit_push(rom: &mut Vec<u8>, value: i32) {
        emit_op(rom, OpCode::PushI32);
        rom.extend_from_slice(&value.to_le_bytes());
    }
    let mut rom = Vec::new();
    emit_push(&mut rom, 1);
    emit_op(&mut rom, OpCode::Yield);
    emit_op(&mut rom, OpCode::FrameSync);
    emit_push(&mut rom, 2);
    emit_op(&mut rom, OpCode::Yield);
    emit_op(&mut rom, OpCode::FrameSync);
    emit_push(&mut rom, 3);
    emit_op(&mut rom, OpCode::FrameSync);
    emit_op(&mut rom, OpCode::Halt);
    let mut vm_left = new_test_vm(rom.clone(), vec![]);
    let mut vm_right = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx_left = HostContext::new(None);
    let mut ctx_right = HostContext::new(None);
    let mut trace_left = Vec::new();
    let mut trace_right = Vec::new();
    // Run both VMs in lockstep slices until both halt.
    for _ in 0..10 {
        if !vm_left.halted {
            let rep = vm_left.run_budget(4, &mut native, &mut ctx_left).expect("vm1 ok");
            trace_left.push((
                vm_left.pc,
                vm_left.current_tick,
                vm_left.operand_stack.len(),
                format!("{:?}", rep.reason),
            ));
        }
        if !vm_right.halted {
            let rep = vm_right.run_budget(4, &mut native, &mut ctx_right).expect("vm2 ok");
            trace_right.push((
                vm_right.pc,
                vm_right.current_tick,
                vm_right.operand_stack.len(),
                format!("{:?}", rep.reason),
            ));
        }
        if vm_left.halted && vm_right.halted {
            break;
        }
    }
    assert!(vm_left.halted && vm_right.halted, "Both VMs should reach HALT deterministically");
    assert_eq!(trace_left, trace_right, "Per-slice traces must be identical across runs");
    // The final stack content must also be deterministic on both machines.
    for vm in vec![&mut vm_left, &mut vm_right] {
        assert_eq!(vm.pop().unwrap(), Value::Int32(3));
        assert_eq!(vm.pop().unwrap(), Value::Int32(2));
        assert_eq!(vm.pop().unwrap(), Value::Int32(1));
        assert!(vm.operand_stack.is_empty());
    }
}
#[test]
fn test_sleep_wake_determinism_across_runs() {
    // Program: SLEEP 2; PUSH 7; FRAME_SYNC; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
    rom.extend_from_slice(&(2u32).to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&7i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut native = MockNative;
    let mut vm_a = new_test_vm(rom.clone(), vec![]);
    let mut vm_b = new_test_vm(rom, vec![]);
    let mut ctx_a = HostContext::new(None);
    let mut ctx_b = HostContext::new(None);
    let mut ticks_a = Vec::new();
    let mut ticks_b = Vec::new();
    // Four slices: three sleeping frames followed by the wake-up frame that
    // finally pushes the value. Record (pc, tick, reason) after every slice.
    for _ in 0..4 {
        let ra = vm_a.run_budget(100, &mut native, &mut ctx_a).unwrap();
        let rb = vm_b.run_budget(100, &mut native, &mut ctx_b).unwrap();
        ticks_a.push((vm_a.pc, vm_a.current_tick, format!("{:?}", ra.reason)));
        ticks_b.push((vm_b.pc, vm_b.current_tick, format!("{:?}", rb.reason)));
    }
    assert_eq!(ticks_a, ticks_b, "Sleep/wake slices must match across runs");
    assert_eq!(vm_a.peek().unwrap(), &Value::Int32(7));
    assert_eq!(vm_b.peek().unwrap(), &Value::Int32(7));
}
#[test]
fn test_gc_many_coroutines_and_wake_order_determinism() {
    use crate::heap::CoroutineState;
    use crate::object::ObjectKind;
    // ROM: FrameSync; FrameSync; Halt — two deterministic safepoints in a row.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    // Force a collection at the very first safepoint.
    vm.gc_alloc_threshold = 1;
    // Allocate many coroutine objects: even indices Ready, odd ones Sleeping
    // with staggered wake ticks. Each iteration also allocates a tiny byte
    // object to increase GC pressure.
    let coro_count = 128u32;
    for i in 0..coro_count {
        let (state, wake) = if i % 2 == 0 {
            (CoroutineState::Ready, 0)
        } else {
            (CoroutineState::Sleeping, (i / 2) as u64)
        };
        let _c = vm.heap.allocate_coroutine(0, state, wake, vec![], vec![]);
        let _b = vm.heap.allocate_object(ObjectKind::Bytes, &[i as u8]);
    }
    // Sanity: allocations are present before the first safepoint.
    assert!(
        vm.heap.len() as u32 >= coro_count,
        "heap should contain coroutine objects and bytes"
    );
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Reaching FrameSync should run GC; Ready/Sleeping coroutines are treated
    // as roots, so only unreferenced byte objects can be reclaimed. We assert
    // determinism: heap size must be stable across two consecutive FrameSyncs
    // with no new allocations in between.
    let before = vm.heap.len();
    match vm.step(&mut native, &mut ctx) {
        Err(LogicalFrameEndingReason::FrameSync) => {}
        other => panic!("Expected FrameSync, got {:?}", other),
    }
    let after_first = vm.heap.len();
    match vm.step(&mut native, &mut ctx) {
        Err(LogicalFrameEndingReason::FrameSync) => {}
        other => panic!("Expected FrameSync, got {:?}", other),
    }
    let after_second = vm.heap.len();
    // GC effect must be deterministic and idempotent at steady state.
    assert!(after_first <= before);
    assert_eq!(after_second, after_first);
}
// fn test_arithmetic_chain() {
// let mut native = MockNative;
// let mut ctx = HostContext::new(None);
//
// // (10 + 20) * 2 / 5 % 4 = 30 * 2 / 5 % 4 = 60 / 5 % 4 = 12 % 4 = 0
// // i.e. (10 + 20) = 30; 30 * 2 = 60; 60 / 5 = 12; 12 % 4 = 0.
// let mut rom = Vec::new();
// rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
// rom.extend_from_slice(&10i32.to_le_bytes());
// rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
// rom.extend_from_slice(&20i32.to_le_bytes());
// rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
// rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
// rom.extend_from_slice(&2i32.to_le_bytes());
// rom.extend_from_slice(&(OpCode::Mul as u16).to_le_bytes());
// rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
// rom.extend_from_slice(&5i32.to_le_bytes());
// rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes());
// rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
// rom.extend_from_slice(&4i32.to_le_bytes());
// rom.extend_from_slice(&(OpCode::Mod as u16).to_le_bytes());
// rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
//
// let mut vm = new_test_vm(rom.clone(), vec![]);
// vm.run_budget(100, &mut native, &mut ctx).unwrap();
//
// assert_eq!(vm.pop().unwrap(), Value::Int32(0));
// }
#[test]
fn test_div_by_zero_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Program: PUSH 10; PUSH 0; DIV; HALT — the division must trap.
    let mut rom = Vec::new();
    let mut emit = |chunk: &[u8]| rom.extend_from_slice(chunk);
    emit(&(OpCode::PushI32 as u16).to_le_bytes());
    emit(&10i32.to_le_bytes());
    emit(&(OpCode::PushI32 as u16).to_le_bytes());
    emit(&0i32.to_le_bytes());
    emit(&(OpCode::Div as u16).to_le_bytes());
    emit(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // The slice must end with TRAP_DIV_ZERO attributed to the DIV opcode.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap, got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_DIV_ZERO);
    assert_eq!(trap.opcode, OpCode::Div as u16);
}
#[test]
fn test_comparisons_polymorphic() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Mixed-type comparison: Int32(10) < Float(20.5) evaluates to true.
    let mut rom = Vec::new();
    let mut emit = |chunk: &[u8]| rom.extend_from_slice(chunk);
    emit(&(OpCode::PushI32 as u16).to_le_bytes());
    emit(&10i32.to_le_bytes());
    emit(&(OpCode::PushF64 as u16).to_le_bytes());
    emit(&20.5f64.to_le_bytes());
    emit(&(OpCode::Lt as u16).to_le_bytes());
    emit(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Boolean(true));
}
#[test]
fn test_push_i64_immediate() {
    // PUSH_I64 42; HALT — a single step leaves Int64(42) on top of the stack.
    let mut rom = (OpCode::PushI64 as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&42i64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Int64(42));
}
#[test]
fn test_push_f64_immediate() {
    // PUSH_F64 3.14; HALT — a single step leaves Float(3.14) on top.
    let mut rom = (OpCode::PushF64 as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&3.14f64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.step(&mut native, &mut ctx).unwrap();
    // Exact equality is safe: the immediate round-trips the same f64 bits.
    assert_eq!(vm.peek().unwrap(), &Value::Float(3.14));
}
#[test]
fn test_push_bool_immediate() {
    // Two PUSH_BOOL instructions (true, then false), then HALT.
    let mut rom = Vec::new();
    for flag in [1u8, 0u8] {
        rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
        rom.push(flag);
    }
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // First step pushes true…
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Boolean(true));
    // …second step pushes false on top of it.
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Boolean(false));
}
#[test]
fn test_push_const_string() {
    // PUSH_CONST 0 loads constant-pool slot 0 ("hello") onto the stack.
    let mut rom = (OpCode::PushConst as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let pool = vec![Value::String("hello".into())];
    let mut vm = new_test_vm(rom, pool);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::String("hello".into()));
}
#[test]
fn test_push_const_invalid_index_traps_oob() {
    // PUSH_CONST 1 against an empty constant pool must raise TRAP_OOB.
    let mut rom = (OpCode::PushConst as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let trap = match vm.run_budget(100, &mut native, &mut ctx).unwrap().reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(OOB), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_OOB);
    assert_eq!(trap.opcode, OpCode::PushConst as u16);
    assert!(trap.message.contains("Invalid constant index"));
}
// Verifies that CALL gives the callee its own local window (the argument is
// visible as local 0) and that RET unwinds back to the caller with exactly
// the declared return slots, leaving the caller's stack and call frame intact.
#[test]
fn test_call_ret_scope_separation() {
let mut rom = Vec::new();
// entrypoint:
// PUSH_I64 10
// CALL func_id 1
// HALT
rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
rom.extend_from_slice(&10i64.to_le_bytes());
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let func_addr = rom.len();
// func:
// PUSH_I64 20
// GET_LOCAL 0 -- should be 10 (arg)
// ADD -- 10 + 20 = 30
// SET_LOCAL 0 -- store result in local 0 (the arg slot)
// GET_LOCAL 0 -- read 30 back
// RET
rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
rom.extend_from_slice(&20i64.to_le_bytes());
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
// Function table: func 0 is the entrypoint; func 1 takes one parameter slot
// and returns one slot, starting at func_addr.
let functions = vec![
FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() },
FunctionMeta {
code_offset: func_addr as u32,
code_len: (rom.len() - func_addr) as u32,
param_slots: 1,
return_slots: 1,
..Default::default()
},
];
let mut vm = VirtualMachine {
program: ProgramImage::new(
rom,
vec![],
functions,
None,
std::collections::HashMap::new(),
),
..Default::default()
};
vm.prepare_call("0");
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Run until Halt
let mut steps = 0;
while !vm.halted && steps < 100 {
vm.step(&mut native, &mut ctx).unwrap();
steps += 1;
}
assert!(vm.halted);
// The single return slot carries 10 + 20 = 30; nothing else remains on the
// operand stack and only the main frame stays on the call stack.
assert_eq!(vm.pop_integer().unwrap(), 30);
assert_eq!(vm.operand_stack.len(), 0);
assert_eq!(vm.call_stack.len(), 1);
// Scope frames removed: no scope stack to assert on
}
// Verifies RET's stack-height contract: a function declaring one return slot
// must have exactly that value on the stack at RET (TRAP_BAD_RET_SLOTS
// otherwise), and succeeds once the value is pushed.
#[test]
fn test_ret_mandatory_value() {
let mut rom = Vec::new();
// entrypoint: CALL func_id 1; HALT
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let func_addr = rom.len();
// func: RET (without pushing a value first)
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let functions = vec![
FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() },
FunctionMeta {
code_offset: func_addr as u32,
code_len: (rom.len() - func_addr) as u32,
param_slots: 0,
return_slots: 1,
..Default::default()
},
];
let mut vm = VirtualMachine {
program: ProgramImage::new(
rom,
vec![],
functions,
None,
std::collections::HashMap::new(),
),
..Default::default()
};
vm.prepare_call("0");
let mut native = MockNative;
let mut ctx = HostContext::new(None);
vm.step(&mut native, &mut ctx).unwrap(); // CALL
let res = vm.step(&mut native, &mut ctx); // RET -> should fail
assert!(res.is_err());
match res.unwrap_err() {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_BAD_RET_SLOTS);
}
_ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS)"),
}
// Now with a return value pushed before RET: same layout, func pushes 123.
let mut rom2 = Vec::new();
rom2.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom2.extend_from_slice(&1u32.to_le_bytes());
rom2.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let func_addr2 = rom2.len();
rom2.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
rom2.extend_from_slice(&123i64.to_le_bytes());
rom2.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let functions2 = vec![
FunctionMeta { code_offset: 0, code_len: func_addr2 as u32, ..Default::default() },
FunctionMeta {
code_offset: func_addr2 as u32,
code_len: (rom2.len() - func_addr2) as u32,
param_slots: 0,
return_slots: 1,
..Default::default()
},
];
let mut vm2 = VirtualMachine {
program: ProgramImage::new(
rom2,
vec![],
functions2,
None,
std::collections::HashMap::new(),
),
..Default::default()
};
vm2.prepare_call("0");
vm2.step(&mut native, &mut ctx).unwrap(); // CALL
vm2.step(&mut native, &mut ctx).unwrap(); // PUSH_I64
vm2.step(&mut native, &mut ctx).unwrap(); // RET
// The declared return slot survives the frame unwind.
assert_eq!(vm2.operand_stack.len(), 1);
assert_eq!(vm2.pop().unwrap(), Value::Int64(123));
}
// Scope tests removed under PR-2.1 (scope frames eliminated)
#[test]
fn test_push_i32() {
    // PUSH_I32 42; HALT — a single step leaves Int32(42) on top of the stack.
    let mut rom = (OpCode::PushI32 as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.step(&mut native, &mut ctx).unwrap();
    assert_eq!(vm.peek().unwrap(), &Value::Int32(42));
}
#[test]
fn test_bitwise_promotion() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Case 1: i32 & i32 stays i32 — 0xF0 & 0x0F == 0.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0xF0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0x0Fi32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::BitAnd as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    for _ in 0..3 {
        vm.step(&mut native, &mut ctx).unwrap();
    }
    assert_eq!(vm.pop().unwrap(), Value::Int32(0));
    // Case 2: i32 | i64 promotes to i64 — 0xF0 | 0x0F == 0xFF.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0xF0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes());
    rom.extend_from_slice(&0x0Fi64.to_le_bytes());
    rom.extend_from_slice(&(OpCode::BitOr as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    for _ in 0..3 {
        vm.step(&mut native, &mut ctx).unwrap();
    }
    assert_eq!(vm.pop().unwrap(), Value::Int64(0xFF));
}
#[test]
fn test_comparisons_lte_gte() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Each case is (lhs, rhs, comparison opcode); all must evaluate to true:
    // 10 <= 20 and 20 >= 20.
    let cases = [(10i32, 20i32, OpCode::Lte), (20i32, 20i32, OpCode::Gte)];
    for (lhs, rhs, op) in cases {
        let mut rom = Vec::new();
        rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
        rom.extend_from_slice(&lhs.to_le_bytes());
        rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
        rom.extend_from_slice(&rhs.to_le_bytes());
        rom.extend_from_slice(&(op as u16).to_le_bytes());
        rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
        let mut vm = new_test_vm(rom, vec![]);
        // Two pushes plus the comparison itself.
        for _ in 0..3 {
            vm.step(&mut native, &mut ctx).unwrap();
        }
        assert_eq!(vm.pop().unwrap(), Value::Boolean(true));
    }
}
#[test]
fn test_negation() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // PUSH_I32 42; NEG — the top of the stack becomes Int32(-42).
    let mut rom = (OpCode::PushI32 as u16).to_le_bytes().to_vec();
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Neg as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    for _ in 0..2 {
        vm.step(&mut native, &mut ctx).unwrap();
    }
    assert_eq!(vm.pop().unwrap(), Value::Int32(-42));
}
// Verifies JMP_IF_TRUE: with true on the stack, execution jumps over the
// fall-through HALT to the absolute byte offset encoded in the immediate.
#[test]
fn test_jmp_if_true() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Corrected Calculations:
// 0-1: PushBool
// 2: 1 (u8)
// 3-4: JmpIfTrue
// 5-8: addr (u32)
// 9-10: Halt (Offset 9)
// 11-12: PushI32 (Offset 11)
// 13-16: 100 (i32)
// 17-18: Halt
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
rom.push(1);
rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
rom.extend_from_slice(&(11u32).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); // Offset 9
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); // Offset 11
rom.extend_from_slice(&100i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.step(&mut native, &mut ctx).unwrap(); // PushBool
vm.step(&mut native, &mut ctx).unwrap(); // JmpIfTrue
// The branch was taken: pc landed on the PushI32 at offset 11, not the
// fall-through Halt at offset 9.
assert_eq!(vm.pc, 11);
vm.step(&mut native, &mut ctx).unwrap(); // PushI32
assert_eq!(vm.pop().unwrap(), Value::Int32(100));
}
#[test]
fn test_trap_opcode() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Program: PUSH_I32 42 (bytes 0..6); TRAP (bytes 6..8); HALT.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Trap as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    let trap = match vm.run_budget(100, &mut native, &mut ctx).unwrap().reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TRAP_EXPLICIT), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_EXPLICIT);
    assert_eq!(trap.opcode, OpCode::Trap as u16);
    // The trap reports the pc of the TRAP instruction itself.
    assert_eq!(trap.pc, 6);
    assert!(trap.message.contains("Program requested trap"));
    // The VM's pc has advanced past the opcode: PushI32 (6 bytes) + Trap (2 bytes).
    assert_eq!(vm.pc, 8);
    // The previously pushed value is left untouched on the operand stack.
    assert_eq!(vm.peek().unwrap(), &Value::Int32(42));
}
#[test]
fn test_debugger_breakpoint_remains_distinct_from_trap_opcode() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Same program as test_trap_opcode, but a breakpoint is placed on the
    // TRAP instruction (byte offset 6): the run must stop with Breakpoint
    // before the TRAP executes, not with Trap.
    let mut rom = Vec::new();
    let mut emit = |chunk: &[u8]| rom.extend_from_slice(chunk);
    emit(&(OpCode::PushI32 as u16).to_le_bytes());
    emit(&42i32.to_le_bytes());
    emit(&(OpCode::Trap as u16).to_le_bytes());
    emit(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    vm.insert_breakpoint(6);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Breakpoint);
    // Execution paused at the breakpoint with the pushed value still on top.
    assert_eq!(vm.pc, 6);
    assert_eq!(vm.peek().unwrap(), &Value::Int32(42));
}
#[test]
fn test_pop_n_opcode() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Push 1, 2, 3 then POP_N 2: only the bottom value (1) must survive.
    let mut rom = Vec::new();
    for value in [1i32, 2, 3] {
        rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
        rom.extend_from_slice(&value.to_le_bytes());
    }
    rom.extend_from_slice(&(OpCode::PopN as u16).to_le_bytes());
    rom.extend_from_slice(&2u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let mut vm = new_test_vm(rom, vec![]);
    vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Int32(1));
    // Nothing else remains on the operand stack.
    assert!(vm.pop().is_err());
}
// Verifies that a RET from the entry-point frame (set up via prepare_call)
// ends the run with EndOfRom rather than an error.
#[test]
fn test_entry_point_ret_with_prepare_call() {
// PushI32 0 (0x17), Pop (0x11), then Ret (0x51) — stack is empty at RET,
// matching the entry function's zero declared return slots.
let rom = vec![
0x17, 0x00, // PushI32
0x00, 0x00, 0x00, 0x00, // value 0
0x11, 0x00, // Pop
0x51, 0x00, // Ret
];
let mut vm = VirtualMachine::new(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
let mut ctx = HostContext::new(None);
// Local no-op host backend (same shape as MockNative).
struct TestNative;
impl NativeInterface for TestNative {
fn syscall(
&mut self,
_id: u32,
_args: &[Value],
_ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
Ok(())
}
}
let mut native = TestNative;
vm.prepare_call("0");
let result = vm.run_budget(100, &mut native, &mut ctx).expect("VM run failed");
assert_eq!(result.reason, LogicalFrameEndingReason::EndOfRom);
}
#[test]
// The host pushes three return slots for syscall 1, but the return-slot count
// enforced from SyscallMeta disagrees; the VM must panic with a
// "results mismatch" message rather than silently accepting the extra slots.
fn test_syscall_abi_multi_slot_return() {
let rom = vec![
0x70, 0x00, // Syscall + Reserved
0x01, 0x00, 0x00, 0x00, // Syscall ID 1
];
// Host that (incorrectly, per the declared ABI) returns three slots.
struct MultiReturnNative;
impl NativeInterface for MultiReturnNative {
fn syscall(
&mut self,
_id: u32,
_args: &[Value],
ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
ret.push_bool(true);
ret.push_int(42);
ret.push_int(255);
Ok(())
}
}
let mut vm = VirtualMachine::new(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
let mut native = MultiReturnNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
// Ensure we have SYSTEM capability so we pass capability gate
vm.set_capabilities(prometeu_hal::syscalls::caps::SYSTEM);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
// Under PR5, VM enforces return-slot count based on SyscallMeta during syscall
// execution. A mismatch yields a Panic with a descriptive message.
match report.reason {
LogicalFrameEndingReason::Panic(msg) => {
assert!(msg.contains("results mismatch"));
}
_ => panic!("Expected Panic with results mismatch, got {:?}", report.reason),
}
}
#[test]
// A void syscall must leave the operand stack exactly as it found it: the
// pre-pushed Int32(100) survives and nothing else is added.
fn test_syscall_abi_void_return() {
let rom = vec![
0x70, 0x00, // Syscall + Reserved
0x01, 0x00, 0x00, 0x00, // Syscall ID 1
];
// Host that pushes no return slots at all.
struct VoidReturnNative;
impl NativeInterface for VoidReturnNative {
fn syscall(
&mut self,
_id: u32,
_args: &[Value],
_ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
Ok(())
}
}
let mut vm = VirtualMachine::new(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
let mut native = VoidReturnNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
// Sentinel value that must still be on the stack after the void syscall.
vm.operand_stack.push(Value::Int32(100));
vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(vm.pop().unwrap(), Value::Int32(100));
assert!(vm.operand_stack.is_empty());
}
#[test]
// The native handler rejects a bool argument where an int is required; the
// VM must surface this as a TYPE trap attributed to the Syscall opcode.
fn test_syscall_arg_type_mismatch_trap() {
// GfxClear (0x1001) takes 1 argument
let rom = vec![
0x16, 0x00, // PushBool + Reserved
0x01, // value 1 (true)
0x70, 0x00, // Syscall + Reserved
0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001
];
// Host that type-checks argument 0 as an int and fails on the pushed bool.
struct ArgCheckNative;
impl NativeInterface for ArgCheckNative {
fn syscall(
&mut self,
_id: u32,
args: &[Value],
_ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
expect_int(args, 0)?;
Ok(())
}
}
let mut vm = VirtualMachine::new(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
let mut native = ArgCheckNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
// Ensure we have GFX capability so we reach type checking inside native handler
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_TYPE);
assert_eq!(trap.opcode, OpCode::Syscall as u16);
}
_ => panic!("Expected Trap, got {:?}", report.reason),
}
}
#[test]
// ADD on mismatched operand types (i32 + bool) must raise a TYPE trap
// attributed to the ADD opcode.
fn test_add_invalid_types_traps_type() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_I32 1\nPUSH_BOOL 1\nADD\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the trap payload or fail loudly on any other ending reason.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TYPE), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_TYPE);
    assert_eq!(trap.opcode, OpCode::Add as u16);
    assert!(trap.message.contains("Invalid types for ADD"));
}
#[test]
// ADD with a single operand on the stack must raise a STACK_UNDERFLOW trap.
fn test_add_stack_underflow_traps() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_I32 1\nADD\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the trap payload or fail loudly on any other ending reason.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(STACK_UNDERFLOW), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_STACK_UNDERFLOW);
    assert_eq!(trap.opcode, OpCode::Add as u16);
    assert!(trap.message.contains("requires two operands"));
}
#[test]
// AND between an i32 and a bool must raise a TYPE trap attributed to AND.
fn test_and_invalid_types_traps_type() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_I32 1\nPUSH_BOOL 1\nAND\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the trap payload or fail loudly on any other ending reason.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TYPE), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_TYPE);
    assert_eq!(trap.opcode, OpCode::And as u16);
    assert!(trap.message.contains("Invalid types for AND"));
}
#[test]
// NOT on a non-bool operand (i32) must raise a TYPE trap.
fn test_not_invalid_type_traps_type() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_I32 1\nNOT\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the trap payload or fail loudly on any other ending reason.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TYPE), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_TYPE);
    assert_eq!(trap.opcode, OpCode::Not as u16);
    assert!(trap.message.contains("Invalid type for NOT"));
}
#[test]
// NEG on a non-numeric operand (bool) must raise a TYPE trap.
fn test_neg_invalid_type_traps_type() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_BOOL 1\nNEG\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the trap payload or fail loudly on any other ending reason.
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TYPE), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_TYPE);
    assert_eq!(trap.opcode, OpCode::Neg as u16);
    assert!(trap.message.contains("Invalid type for Neg"));
}
#[test]
// POP on an empty operand stack must trap with STACK_UNDERFLOW.
fn test_pop_underflow_traps() {
    // Emit the two-byte opcodes back to back: POP on an empty stack, then HALT.
    let mut rom = Vec::new();
    for op in [OpCode::Pop, OpCode::Halt] {
        rom.extend_from_slice(&(op as u16).to_le_bytes());
    }
    let mut vm = new_test_vm(rom, vec![]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(STACK_UNDERFLOW), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_STACK_UNDERFLOW);
    assert_eq!(trap.opcode, OpCode::Pop as u16);
    assert!(trap.message.contains("POP requires one operand"));
}
#[test]
// An unknown syscall id (0xDEADBEEF) must trap with INVALID_SYSCALL at pc 0.
fn test_invalid_syscall_trap() {
let rom = vec![
0x70, 0x00, // Syscall + Reserved
0xEF, 0xBE, 0xAD, 0xDE, // 0xDEADBEEF
];
let mut vm = new_test_vm(rom.clone(), vec![]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
// Grant GFX capability so arg underflow is checked (capability gate is first)
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_INVALID_SYSCALL);
assert_eq!(trap.opcode, OpCode::Syscall as u16);
assert!(trap.message.contains("Unknown syscall"));
assert_eq!(trap.pc, 0);
}
_ => panic!("Expected Trap, got {:?}", report.reason),
}
}
#[test]
// Intrinsic 0x1000 (vec2 dot) runs entirely in-VM: dot((1,2),(3,4)) = 11,
// with no host syscall involved (MockNative is never consulted).
fn test_intrinsic_vec2_dot_executes_without_syscalls() {
let mut rom = Vec::new();
// Push the four f64 components (x1, y1, x2, y2).
for value in [1.0f64, 2.0, 3.0, 4.0] {
rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes());
rom.extend_from_slice(&value.to_bits().to_le_bytes());
}
rom.extend_from_slice(&(OpCode::Intrinsic as u16).to_le_bytes());
rom.extend_from_slice(&0x1000u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom, vec![]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert!(matches!(report.reason, LogicalFrameEndingReason::Halted));
assert_eq!(vm.operand_stack, vec![Value::Float(11.0)]);
}
#[test]
// Intrinsic 0x1001 (vec2 length) runs entirely in-VM: |(3,4)| = 5,
// with no host syscall involved.
fn test_intrinsic_vec2_length_executes_without_syscalls() {
let mut rom = Vec::new();
// Push the two f64 components (x, y) of the classic 3-4-5 triangle.
for value in [3.0f64, 4.0] {
rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes());
rom.extend_from_slice(&value.to_bits().to_le_bytes());
}
rom.extend_from_slice(&(OpCode::Intrinsic as u16).to_le_bytes());
rom.extend_from_slice(&0x1001u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom, vec![]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert!(matches!(report.reason, LogicalFrameEndingReason::Halted));
assert_eq!(vm.operand_stack, vec![Value::Float(5.0)]);
}
#[test]
// Intrinsic 0x2000 (input pad) is handled in-VM and leaves its builtin
// carrier value (Int64(1)) on the operand stack.
fn test_intrinsic_input_pad_returns_builtin_carrier() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("INTRINSIC 0x2000\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let outcome = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert!(matches!(outcome.reason, LogicalFrameEndingReason::Halted));
    assert_eq!(vm.operand_stack, vec![Value::Int64(1)]);
}
#[test]
// Touch intrinsics (0x2001/0x2021) need hardware backing; with a bare
// HostContext the VM must end the run with a "Host feature unavailable" panic.
fn test_intrinsic_input_touch_x_requires_hardware_context() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("INTRINSIC 0x2001\nINTRINSIC 0x2021\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let outcome = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    // Extract the panic message or fail loudly on any other ending reason.
    let msg = match outcome.reason {
        LogicalFrameEndingReason::Panic(msg) => msg,
        other => panic!("Expected Panic due to missing hardware context, got {:?}", other),
    };
    assert!(msg.contains("Host feature unavailable"));
}
#[test]
// Unknown intrinsic ids get their own trap code (INVALID_INTRINSIC), not the
// syscall one, and the trap is attributed to the Intrinsic opcode.
fn test_invalid_intrinsic_trap_is_distinct_from_syscall() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("INTRINSIC 0xDEADBEEF\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let outcome = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    let trap = match outcome.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected intrinsic trap, got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_INVALID_INTRINSIC);
    assert_eq!(trap.opcode, OpCode::Intrinsic as u16);
    assert!(trap.message.contains("Unknown intrinsic"));
}
#[test]
// Intrinsic 0x1000 needs four operands; with only one on the stack the VM
// must trap with STACK_UNDERFLOW attributed to the Intrinsic opcode.
fn test_intrinsic_argument_underflow_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let rom = assemble("PUSH_I32 1\nINTRINSIC 0x1000\nHALT").expect("assemble");
    let mut vm = new_test_vm(rom, vec![]);
    let outcome = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    let trap = match outcome.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected intrinsic underflow trap, got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_STACK_UNDERFLOW);
    assert_eq!(trap.opcode, OpCode::Intrinsic as u16);
    assert!(trap.message.contains("Intrinsic argument stack underflow"));
}
#[test]
// GfxClear (0x1001) needs one argument; calling it with an empty stack must
// trap with STACK_UNDERFLOW once the capability gate has been passed.
fn test_syscall_arg_underflow_trap() {
// GfxClear (0x1001) expects 1 arg
let rom = vec![
0x70, 0x00, // Syscall + Reserved
0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001
];
let mut vm = new_test_vm(rom.clone(), vec![]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
// Grant GFX capability so arg underflow is checked (capability gate is first)
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_STACK_UNDERFLOW);
assert_eq!(trap.opcode, OpCode::Syscall as u16);
assert!(trap.message.contains("underflow"));
assert_eq!(trap.pc, 0);
}
_ => panic!("Expected Trap, got {:?}", report.reason),
}
}
#[test]
// Without the GFX capability the syscall is refused before argument checks,
// trapping with INVALID_SYSCALL ("Missing capability").
fn test_syscall_missing_capability_trap() {
// Program: directly call GfxClear (0x1001). We check caps before args, so no underflow.
let rom = vec![
0x70, 0x00, // Syscall + Reserved
0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001 (LE)
];
let mut vm = new_test_vm(rom.clone(), vec![]);
// Remove all capabilities
vm.set_capabilities(0);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_INVALID_SYSCALL);
assert_eq!(trap.opcode, OpCode::Syscall as u16);
assert!(trap.message.contains("Missing capability"));
assert_eq!(trap.pc, 0);
}
other => panic!("Expected Trap, got {:?}", other),
}
}
#[test]
// With the GFX capability granted, GfxClear (0x1001) must not trap; any
// non-trap ending reason counts as success.
fn test_syscall_with_capability_success() {
    // Program: push arg 0, then call GfxClear.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Syscall as u16).to_le_bytes());
    rom.extend_from_slice(&0x1001u32.to_le_bytes()); // GfxClear
    let mut vm = new_test_vm(rom, vec![]);
    vm.set_capabilities(prometeu_hal::syscalls::caps::GFX); // grant only GFX
    vm.prepare_call("0");
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    if let LogicalFrameEndingReason::Trap(trap) = report.reason {
        panic!("Unexpected trap: {:?}", trap);
    }
}
#[test]
// GfxClear565 (0x1010) is void; a host that pushes a result anyway must make
// the VM panic with a "results mismatch" message.
fn test_syscall_results_count_mismatch_panic() {
// GfxClear565 (0x1010) expects 0 results
let rom = vec![
0x17, 0x00, // PushI32
0x00, 0x00, 0x00, 0x00, // value 0
0x70, 0x00, // Syscall + Reserved
0x10, 0x10, 0x00, 0x00, // Syscall ID 0x1010
];
// Host that deliberately violates the declared return-slot count.
struct BadNative;
impl NativeInterface for BadNative {
fn syscall(
&mut self,
_id: u32,
_args: &[Value],
ret: &mut HostReturn,
_ctx: &mut HostContext,
) -> Result<(), VmFault> {
// Wrong: GfxClear565 is void but we push something
ret.push_int(42);
Ok(())
}
}
let mut vm = new_test_vm(rom.clone(), vec![]);
// Grant GFX capability so results mismatch path is exercised
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let mut native = BadNative;
let mut ctx = HostContext::new(None);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Panic(msg) => assert!(msg.contains("results mismatch")),
_ => panic!("Expected Panic, got {:?}", report.reason),
}
}
#[test]
// A ROM with a bogus magic number must be rejected as InvalidFormat and must
// leave the VM untouched.
fn test_loader_hardening_invalid_magic() {
    let mut vm = VirtualMachine::default();
    let bogus = vec![0u8; 4];
    assert_eq!(vm.initialize(bogus, ""), Err(VmInitError::InvalidFormat));
    // The failed load must not install any ROM bytes.
    assert!(vm.program.rom.is_empty());
}
#[test]
// A PBS header carrying an unsupported version number must be refused with
// UnsupportedFormat.
fn test_loader_hardening_unsupported_version() {
    let mut vm = VirtualMachine::default();
    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");
    // Version field lives at bytes 4..6; version 1 is not supported.
    header[4..6].copy_from_slice(&1u16.to_le_bytes());
    assert_eq!(vm.initialize(header, ""), Err(VmInitError::UnsupportedFormat));
}
#[test]
// A PBS v0 header that claims one section but provides no section payload
// must fail the load with an UnexpectedEof image-load error.
fn test_loader_hardening_malformed_pbs_v0() {
    let mut vm = VirtualMachine::default();
    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");
    header[8..12].copy_from_slice(&1u32.to_le_bytes()); // 1 section claimed but none provided
    let res = vm.initialize(header, "");
    match res {
        Err(VmInitError::ImageLoadFailed(prometeu_bytecode::LoadError::UnexpectedEof)) => {}
        // Fixed: the failure message previously named the stale `PbsV0LoadFailed`
        // variant even though the arm above matches `ImageLoadFailed`.
        _ => panic!("Expected ImageLoadFailed(UnexpectedEof), got {:?}", res),
    }
}
#[test]
// A numeric entrypoint beyond the (empty) ROM must be rejected, and the
// failed initialize must leave pc and ROM untouched.
fn test_loader_hardening_entrypoint_not_found() {
let mut vm = VirtualMachine::default();
// Minimal well-formed empty module.
let header = prometeu_bytecode::model::BytecodeModule {
version: 0,
const_pool: vec![],
functions: vec![],
code: vec![],
debug_info: None,
exports: vec![],
syscalls: vec![],
}
.serialize();
// Try to initialize with numeric entrypoint 10 (out of bounds for empty ROM)
let res = vm.initialize(header, "10");
assert_eq!(res, Err(VmInitError::EntrypointNotFound));
// VM state should not be updated
assert_eq!(vm.pc, 0);
assert_eq!(vm.program.rom.len(), 0);
}
#[test]
// A minimal valid module must initialize cleanly and reset polluted VM state
// (pc, cycles) back to zero.
fn test_loader_hardening_successful_init() {
let mut vm = VirtualMachine::default();
vm.pc = 123; // Pollution
// Minimal well-formed empty module.
let header = prometeu_bytecode::model::BytecodeModule {
version: 0,
const_pool: vec![],
functions: vec![],
code: vec![],
debug_info: None,
exports: vec![],
syscalls: vec![],
}
.serialize();
let res = vm.initialize(header, "");
assert!(res.is_ok());
assert_eq!(vm.pc, 0);
assert_eq!(vm.program.rom.len(), 0);
assert_eq!(vm.cycles, 0);
}
#[test]
// The loader rewrites HOSTCALL (an index into the SYSC table) into a concrete
// SYSCALL id before verification; the patched ROM must contain no HOSTCALL.
fn test_loader_patching_rewrites_hostcall_before_verification() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let code = assemble("PUSH_I32 0\nHOSTCALL 0\nHALT").expect("assemble");
// SYSC entry 0: gfx::clear v1, which HOSTCALL 0 references.
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "clear".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert!(res.is_ok(), "patched hostcall should initialize");
let text = disassemble(&vm.program.rom).expect("disassemble patched rom");
assert!(text.contains("SYSCALL 0x1001"));
assert!(!text.contains("HOSTCALL"));
}
#[test]
// HOSTCALL referencing a SYSC index past the end of the table must fail
// initialization with HostcallIndexOutOfBounds.
fn test_loader_patching_rejects_hostcall_index_out_of_bounds() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
// HOSTCALL 1, but the SYSC table below only has a single entry (index 0).
let code = assemble("HOSTCALL 1\nHALT").expect("assemble");
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "clear".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::HostcallIndexOutOfBounds {
pc: 0,
sysc_index: 1,
syscalls_len: 1,
},
))
);
}
#[test]
// Every declared syscall must be referenced by some HOSTCALL; a dangling
// declaration (index 1 here) fails initialization with UnusedSyscallDecl.
fn test_loader_patching_rejects_unused_syscall_decl() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
// Only SYSC index 0 is referenced; index 1 (draw_text) is declared but unused.
let code = assemble("PUSH_I32 0\nHOSTCALL 0\nHALT").expect("assemble");
let bytes = serialized_single_function_module(
code,
vec![
SyscallDecl {
module: "gfx".into(),
name: "clear".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
},
SyscallDecl {
module: "gfx".into(),
name: "draw_text".into(),
version: 1,
arg_slots: 4,
ret_slots: 0,
},
],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::UnusedSyscallDecl {
sysc_index: 1,
module: "gfx".into(),
name: "draw_text".into(),
version: 1,
},
))
);
}
#[test]
// Preload artifacts may only use HOSTCALL; a raw SYSCALL opcode in the image
// is rejected with RawSyscallInPreloadArtifact (pc points at the SYSCALL).
fn test_loader_patching_rejects_raw_syscall_in_preload_artifact() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let code = assemble("PUSH_I32 0\nSYSCALL 0x1001\nHALT").expect("assemble");
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "clear".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::RawSyscallInPreloadArtifact {
pc: 6,
syscall_id: 0x1001,
},
))
);
}
#[test]
// A declaration naming a syscall the HAL does not know must propagate as
// ResolveFailed(UnknownSyscall).
fn test_loader_patching_propagates_resolution_failure() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let code = assemble("HOSTCALL 0\nHALT").expect("assemble");
// gfx::missing does not exist in the syscall registry.
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "missing".into(),
version: 1,
arg_slots: 0,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::ResolveFailed(
prometeu_hal::syscalls::DeclaredLoadError::UnknownSyscall {
module: "gfx".into(),
name: "missing".into(),
version: 1,
},
),
))
);
}
#[test]
// Resolving gfx::clear without the GFX capability must propagate as
// ResolveFailed(MissingCapability) with required/provided caps reported.
fn test_loader_patching_propagates_missing_capability() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::NONE);
let code = assemble("HOSTCALL 0\nHALT").expect("assemble");
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "clear".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::ResolveFailed(
prometeu_hal::syscalls::DeclaredLoadError::MissingCapability {
required: prometeu_hal::syscalls::caps::GFX,
provided: prometeu_hal::syscalls::caps::NONE,
module: "gfx".into(),
name: "clear".into(),
version: 1,
},
),
))
);
}
#[test]
// A declaration whose slot counts disagree with the HAL signature must
// propagate as ResolveFailed(AbiMismatch) carrying both sides of the ABI.
fn test_loader_patching_propagates_abi_mismatch() {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::GFX);
let code = assemble("HOSTCALL 0\nHALT").expect("assemble");
// gfx::draw_line declared with 4 arg slots; the HAL expects 5.
let bytes = serialized_single_function_module(
code,
vec![SyscallDecl {
module: "gfx".into(),
name: "draw_line".into(),
version: 1,
arg_slots: 4,
ret_slots: 0,
}],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::LoaderPatchFailed(
crate::vm_init_error::LoaderPatchError::ResolveFailed(
prometeu_hal::syscalls::DeclaredLoadError::AbiMismatch {
module: "gfx".into(),
name: "draw_line".into(),
version: 1,
declared_arg_slots: 4,
declared_ret_slots: 0,
expected_arg_slots: 5,
expected_ret_slots: 0,
},
),
))
);
}
#[test]
// Status-first signatures (an extra leading status slot counted in ret_slots)
// are the current ABI for these syscalls and must all resolve successfully.
fn test_loader_patching_accepts_status_first_signatures() {
let cases = vec![
SyscallDecl {
module: "gfx".into(),
name: "set_sprite".into(),
version: 1,
arg_slots: 10,
ret_slots: 1,
},
SyscallDecl {
module: "audio".into(),
name: "play_sample".into(),
version: 1,
arg_slots: 5,
ret_slots: 1,
},
SyscallDecl {
module: "audio".into(),
name: "play".into(),
version: 1,
arg_slots: 7,
ret_slots: 1,
},
SyscallDecl {
module: "asset".into(),
name: "load".into(),
version: 1,
arg_slots: 3,
ret_slots: 2,
},
SyscallDecl {
module: "asset".into(),
name: "commit".into(),
version: 1,
arg_slots: 1,
ret_slots: 1,
},
SyscallDecl {
module: "asset".into(),
name: "cancel".into(),
version: 1,
arg_slots: 1,
ret_slots: 1,
},
];
// Each declaration is loaded into a fresh VM with all capabilities granted.
for syscall in cases {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::ALL);
let bytes = serialized_single_hostcall_module(syscall);
let res = vm.initialize(bytes, "");
assert!(res.is_ok(), "status-first signature must be accepted: {:?}", res);
}
}
#[test]
// Legacy declarations missing the status slot (ret_slots one lower than the
// current ABI) must be rejected with an AbiMismatch whose expected ret_slots
// differs from the declared count while the arg slots agree.
fn test_loader_patching_rejects_legacy_status_first_ret_slots() {
let cases = vec![
SyscallDecl {
module: "gfx".into(),
name: "set_sprite".into(),
version: 1,
arg_slots: 10,
ret_slots: 0,
},
SyscallDecl {
module: "audio".into(),
name: "play_sample".into(),
version: 1,
arg_slots: 5,
ret_slots: 0,
},
SyscallDecl {
module: "audio".into(),
name: "play".into(),
version: 1,
arg_slots: 7,
ret_slots: 0,
},
SyscallDecl {
module: "asset".into(),
name: "load".into(),
version: 1,
arg_slots: 3,
ret_slots: 1,
},
SyscallDecl {
module: "asset".into(),
name: "commit".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
},
SyscallDecl {
module: "asset".into(),
name: "cancel".into(),
version: 1,
arg_slots: 1,
ret_slots: 0,
},
];
for syscall in cases {
let mut vm = VirtualMachine::default();
vm.set_capabilities(prometeu_hal::syscalls::caps::ALL);
let bytes = serialized_single_hostcall_module(syscall.clone());
let err = vm.initialize(bytes, "").expect_err("legacy ABI must be rejected");
match err {
VmInitError::LoaderPatchFailed(crate::vm_init_error::LoaderPatchError::ResolveFailed(
prometeu_hal::syscalls::DeclaredLoadError::AbiMismatch {
module,
name,
version,
declared_arg_slots,
declared_ret_slots,
expected_arg_slots,
expected_ret_slots,
},
)) => {
// The error must echo the declaration exactly...
assert_eq!(module, syscall.module);
assert_eq!(name, syscall.name);
assert_eq!(version, syscall.version);
assert_eq!(declared_arg_slots, syscall.arg_slots);
assert_eq!(declared_ret_slots, syscall.ret_slots);
// ...agree on args, and disagree only on the return-slot count.
assert_eq!(expected_arg_slots, declared_arg_slots);
assert_ne!(expected_ret_slots, declared_ret_slots);
}
other => panic!("expected ABI mismatch, got {:?}", other),
}
}
}
#[test]
// A v0 image without a SYSC section must be rejected at load time with
// MissingSyscallSection.
fn test_loader_hardening_missing_sysc_section() {
    let mut vm = VirtualMachine::default();
    let mut header = vec![0u8; 32];
    header[0..4].copy_from_slice(b"PBS\0");
    let expected = Err(VmInitError::ImageLoadFailed(
        prometeu_bytecode::LoadError::MissingSyscallSection,
    ));
    assert_eq!(vm.initialize(header, ""), expected);
}
#[test]
// Two SYSC entries with the same (module, name, version) identity must fail
// the image load with DuplicateSyscallIdentity.
fn test_loader_hardening_duplicate_sysc_identity() {
let mut vm = VirtualMachine::default();
let bytes = serialized_single_function_module(
vec![],
vec![
SyscallDecl {
module: "system".into(),
name: "has_cart".into(),
version: 1,
arg_slots: 0,
ret_slots: 1,
},
SyscallDecl {
module: "system".into(),
name: "has_cart".into(),
version: 1,
arg_slots: 0,
ret_slots: 1,
},
],
);
let res = vm.initialize(bytes, "");
assert_eq!(
res,
Err(VmInitError::ImageLoadFailed(
prometeu_bytecode::LoadError::DuplicateSyscallIdentity
))
);
}
#[test]
// CALL passes the top-of-stack arguments as callee locals; RET pushes the
// single return slot back for the caller: 10 + 20 => 30.
fn test_calling_convention_add() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// F0 (entry):
// PUSH_I32 10
// PUSH_I32 20
// CALL 1 (add)
// HALT
// F1 (add):
// GET_LOCAL 0 (a)
// GET_LOCAL 1 (b)
// ADD
// RET (1 slot)
let mut rom = Vec::new();
// F0
let f0_start = 0;
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&10i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&20i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let f0_len = rom.len() - f0_start;
// F1
let f1_start = rom.len() as u32;
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let f1_len = rom.len() as u32 - f1_start;
let mut vm = new_test_vm(rom.clone(), vec![]);
// F1 declares 2 params and 1 return slot; F0 is the plain entry function.
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: f0_start as u32,
code_len: f0_len as u32,
..Default::default()
},
FunctionMeta {
code_offset: f1_start,
code_len: f1_len,
param_slots: 2,
return_slots: 1,
..Default::default()
},
]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
assert_eq!(vm.operand_stack.last().unwrap(), &Value::Int32(30));
}
#[test]
// A callee with return_slots == 2 leaves both values, in push order, on the
// caller's stack after RET.
fn test_calling_convention_multi_slot_return() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// F0:
// CALL 1
// HALT
// F1:
// PUSH_I32 100
// PUSH_I32 200
// RET (2 slots)
let mut rom = Vec::new();
// F0
let f0_start = 0;
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let f0_len = rom.len() - f0_start;
// F1
let f1_start = rom.len() as u32;
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&100i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&200i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let f1_len = rom.len() as u32 - f1_start;
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: f0_start as u32,
code_len: f0_len as u32,
..Default::default()
},
FunctionMeta {
code_offset: f1_start,
code_len: f1_len,
param_slots: 0,
return_slots: 2,
..Default::default()
},
]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
// Stack should be [100, 200]
assert_eq!(vm.operand_stack.len(), 2);
assert_eq!(vm.operand_stack[0], Value::Int32(100));
assert_eq!(vm.operand_stack[1], Value::Int32(200));
}
#[test]
// A void callee (param_slots 1, return_slots 0) consumes its argument and
// returns nothing, leaving the caller's stack empty after the call.
fn test_calling_convention_void_call() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// F0:
// PUSH_I32 42
// CALL 1
// HALT
// F1:
// RET (0 slots; the 42 argument is consumed as the callee's parameter)
let mut rom = Vec::new();
let f0_start = 0;
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&42i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let f0_len = rom.len() - f0_start;
let f1_start = rom.len() as u32;
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let f1_len = rom.len() as u32 - f1_start;
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: f0_start as u32,
code_len: f0_len as u32,
..Default::default()
},
FunctionMeta {
code_offset: f1_start,
code_len: f1_len,
param_slots: 1,
return_slots: 0,
..Default::default()
},
]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
assert_eq!(vm.operand_stack.len(), 0);
}
#[test]
// CALL with a function index that was never registered must trap with
// INVALID_FUNC attributed to the Call opcode.
fn test_trap_invalid_func() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&99u32.to_le_bytes()); // function 99 does not exist
    let mut vm = new_test_vm(rom, vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    let trap = match report.reason {
        LogicalFrameEndingReason::Trap(trap) => trap,
        other => panic!("Expected Trap(TRAP_INVALID_FUNC), got {:?}", other),
    };
    assert_eq!(trap.code, TRAP_INVALID_FUNC);
    assert_eq!(trap.opcode, OpCode::Call as u16);
}
#[test]
// A RET that leaves more values than return_slots declares must trap with
// BAD_RET_SLOTS ("Incorrect stack height").
fn test_trap_bad_ret_slots() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// F0: CALL 1; HALT
// F1: PUSH_I32 42; RET (expected 0 slots)
let mut rom = Vec::new();
let f0_start = 0;
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let f0_len = rom.len() - f0_start;
let f1_start = rom.len() as u32;
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&42i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let f1_len = rom.len() as u32 - f1_start;
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: f0_start as u32,
code_len: f0_len as u32,
..Default::default()
},
FunctionMeta {
code_offset: f1_start,
code_len: f1_len,
param_slots: 0,
return_slots: 0, // ERROR: function pushes 42 but returns 0
..Default::default()
},
]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_BAD_RET_SLOTS);
assert_eq!(trap.opcode, OpCode::Ret as u16);
assert!(trap.message.contains("Incorrect stack height"));
}
_ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS), got {:?}", report.reason),
}
}
#[test]
// A value stored with SET_LOCAL must survive intervening stack traffic and
// come back unchanged via GET_LOCAL.
fn test_locals_round_trip() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// PUSH_I32 42
// SET_LOCAL 0
// PUSH_I32 0 (garbage)
// POP
// GET_LOCAL 0
// RET (1 slot)
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&42i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&0i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
// NOTE(review): the ROM emitted above is 28 bytes, but code_len is 20
// (ending right after POP) — the test still expects GET_LOCAL/RET to run
// and finish with EndOfRom; confirm whether code_len bounds execution here.
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: 20,
local_slots: 1,
return_slots: 1,
..Default::default()
}]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::EndOfRom);
// RET pops return values and pushes them back on the caller stack (which is the sentinel frame's stack here).
assert_eq!(vm.operand_stack, vec![Value::Int32(42)]);
}
#[test]
fn test_locals_per_call_isolation() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Function 0 (entry):
// CALL 1
// POP
// CALL 1
// HALT
// Function 1:
// GET_LOCAL 0 (should be Null initially)
// PUSH_I32 42
// SET_LOCAL 0
// RET (1 slot: the initial Null)
let mut rom = Vec::new();
// F0
let f0_start = 0;
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let f0_len = rom.len() - f0_start;
// F1
let f1_start = rom.len() as u32;
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&42i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
rom.extend_from_slice(&0u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
let f1_len = rom.len() as u32 - f1_start;
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: f0_start as u32,
code_len: f0_len as u32,
..Default::default()
},
FunctionMeta {
code_offset: f1_start,
code_len: f1_len,
local_slots: 1,
return_slots: 1,
..Default::default()
},
]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
// The last value on stack is the return of the second CALL 1,
// which should be Value::Null because locals are zero-initialized on each call.
assert_eq!(vm.operand_stack.last().unwrap(), &Value::Null);
}
#[test]
fn test_invalid_local_index_traps() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Function with 0 params, 1 local.
// GET_LOCAL 1 (OOB)
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
rom.extend_from_slice(&1u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: 8,
local_slots: 1,
..Default::default()
}]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_INVALID_LOCAL);
assert_eq!(trap.opcode, OpCode::GetLocal as u16);
assert!(trap.message.contains("out of bounds"));
}
_ => panic!("Expected Trap, got {:?}", report.reason),
}
}
#[test]
fn test_nested_if() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// if (true) {
// if (false) {
// PUSH 1
// } else {
// PUSH 2
// }
// } else {
// PUSH 3
// }
// HALT
let mut rom = Vec::new();
// 0: PUSH_BOOL true
rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
rom.push(1);
// 3: JMP_IF_FALSE -> ELSE1 (offset 42)
rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
rom.extend_from_slice(&42u32.to_le_bytes());
// INNER IF:
// 9: PUSH_BOOL false
rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
rom.push(0);
// 12: JMP_IF_FALSE -> ELSE2 (offset 30)
rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
rom.extend_from_slice(&30u32.to_le_bytes());
// 18: PUSH_I32 1
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&1i32.to_le_bytes());
// 24: JMP -> END (offset 48)
rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
rom.extend_from_slice(&48u32.to_le_bytes());
// ELSE2:
// 30: PUSH_I32 2
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&2i32.to_le_bytes());
// 36: JMP -> END (offset 48)
rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
rom.extend_from_slice(&48u32.to_le_bytes());
// ELSE1:
// 42: PUSH_I32 3
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&3i32.to_le_bytes());
// END:
// 48: HALT
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
// We need to set up the function meta for absolute jumps to work correctly
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: 50,
..Default::default()
}]);
vm.prepare_call("0");
vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(vm.pop().unwrap(), Value::Int32(2));
}
#[test]
fn test_if_with_empty_branches() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// PUSH_BOOL true
// JMP_IF_FALSE -> ELSE (offset 15)
// // Empty then
// JMP -> END (offset 15)
// ELSE:
// // Empty else
// END:
// HALT
let mut rom = Vec::new();
// 0-2: PUSH_BOOL true
rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
rom.push(1);
// 3-8: JMP_IF_FALSE -> 15
rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
rom.extend_from_slice(&15u32.to_le_bytes());
// 9-14: JMP -> 15
rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
rom.extend_from_slice(&15u32.to_le_bytes());
// 15-16: HALT
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: 17,
..Default::default()
}]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
assert_eq!(vm.operand_stack.len(), 0);
}
#[test]
fn test_jmp_if_non_boolean_trap() {
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// PUSH_I32 1
// JMP_IF_TRUE 9
// HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
rom.extend_from_slice(&1i32.to_le_bytes());
rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
rom.extend_from_slice(&9u32.to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: 14,
..Default::default()
}]);
vm.prepare_call("0");
let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
match report.reason {
LogicalFrameEndingReason::Trap(trap) => {
assert_eq!(trap.code, TRAP_TYPE);
assert_eq!(trap.opcode, OpCode::JmpIfTrue as u16);
assert!(trap.message.contains("Expected boolean"));
}
_ => panic!("Expected Trap, got {:?}", report.reason),
}
}
#[test]
fn test_gc_triggers_only_at_frame_sync() {
use crate::object::ObjectKind;
// ROM: NOP; FRAME_SYNC; HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::Nop as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
// Set a very low threshold to trigger GC as soon as we hit FRAME_SYNC
vm.gc_alloc_threshold = 1;
// Allocate an unreachable object (no roots referencing it)
let _orphan = vm.heap.allocate_object(ObjectKind::Bytes, &[1, 2, 3]);
// +1 for the main coroutine allocated by new_test_vm
assert_eq!(vm.heap.len(), 2);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Step 1: NOP — should not run GC
vm.step(&mut native, &mut ctx).unwrap();
assert_eq!(vm.heap.len(), 2, "GC must not run except at safepoints");
// Step 2: FRAME_SYNC — GC should run and reclaim the unreachable object
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
// Main coroutine remains
assert_eq!(vm.heap.len(), 1, "Unreachable object must be reclaimed at FRAME_SYNC");
}
#[test]
fn test_gc_keeps_roots_and_collects_unreachable_at_frame_sync() {
use crate::object::ObjectKind;
// ROM: FRAME_SYNC; FRAME_SYNC; HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
vm.gc_alloc_threshold = 1;
// Allocate two objects; make one a root by placing it on the operand stack
let rooted = vm.heap.allocate_object(ObjectKind::Bytes, &[9, 9]);
let unreachable = vm.heap.allocate_object(ObjectKind::Bytes, &[8, 8, 8]);
// +1 for main coroutine
assert_eq!(vm.heap.len(), 3);
vm.operand_stack.push(Value::HeapRef(rooted));
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Execute FRAME_SYNC: should trigger GC
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
// Rooted must survive; unreachable must be collected; main coroutine remains
assert_eq!(vm.heap.len(), 2);
assert!(vm.heap.is_valid(rooted));
assert!(!vm.heap.is_valid(unreachable));
}
#[test]
fn test_gc_simple_allocation_collection_cycle() {
use crate::object::ObjectKind;
// ROM: FRAME_SYNC; FRAME_SYNC; HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
// Make GC trigger on any allocation delta
vm.gc_alloc_threshold = 1;
// Cycle 1: allocate one unreachable object
let _h1 = vm.heap.allocate_object(ObjectKind::Bytes, &[1]);
// +1 for main coroutine
assert_eq!(vm.heap.len(), 2);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// FRAME_SYNC should collect it (first FRAME_SYNC)
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
// Main coroutine remains
assert_eq!(vm.heap.len(), 1);
// Cycle 2: allocate again and collect again deterministically
let _h2 = vm.heap.allocate_object(ObjectKind::Bytes, &[2]);
assert_eq!(vm.heap.len(), 2);
// Second FRAME_SYNC should also be reached deterministically
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
assert_eq!(vm.heap.len(), 1);
}
#[test]
fn test_gc_many_short_lived_objects_stress() {
use crate::object::ObjectKind;
// ROM: FRAME_SYNC; HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
// Deterministic: trigger collection when any growth since last sweep occurs
vm.gc_alloc_threshold = 1;
// Allocate many small, unreferenced objects
let count = 2048usize; // stress but still quick
for i in 0..count {
let byte = (i & 0xFF) as u8;
let _ = vm.heap.allocate_object(ObjectKind::Bytes, &[byte]);
}
// +1 for main coroutine
assert_eq!(vm.heap.len(), count + 1);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Single FRAME_SYNC should reclaim all since there are no roots
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
assert_eq!(
vm.heap.len(),
1,
"All short-lived objects except main coroutine must be reclaimed deterministically"
);
}
#[test]
fn test_gc_keeps_objects_captured_by_suspended_coroutines() {
use crate::heap::CoroutineState;
use crate::object::ObjectKind;
// ROM: FRAME_SYNC; HALT (trigger GC at safepoint)
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
// Trigger GC at first FRAME_SYNC
vm.gc_alloc_threshold = 1;
// Allocate a heap object and a suspended coroutine that captures it on its stack
let captured = vm.heap.allocate_object(ObjectKind::Bytes, &[0xAA, 0xBB]);
let _coro = vm.heap.allocate_coroutine(
0,
CoroutineState::Ready,
0,
vec![Value::HeapRef(captured)],
vec![],
);
assert_eq!(
vm.heap.len(),
3,
"object + suspended coroutine + main coroutine must be allocated"
);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// FRAME_SYNC: GC runs and should keep both alive via suspended coroutine root
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
assert!(vm.heap.is_valid(captured), "captured object must remain alive");
// Captured object + suspended coroutine + main coroutine
assert_eq!(vm.heap.len(), 3, "both coroutine and captured object must survive (plus main)");
}
#[test]
fn test_gc_collects_finished_coroutine() {
use crate::heap::CoroutineState;
// ROM: FRAME_SYNC; HALT (trigger GC at safepoint)
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
vm.gc_alloc_threshold = 1;
// Allocate a finished coroutine with no external references
let finished = vm.heap.allocate_coroutine(0, CoroutineState::Finished, 0, vec![], vec![]);
assert!(vm.heap.is_valid(finished));
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// FRAME_SYNC: GC should collect the finished coroutine since it's not a root
match vm.step(&mut native, &mut ctx) {
Err(LogicalFrameEndingReason::FrameSync) => {}
other => panic!("Expected FrameSync, got {:?}", other),
}
assert!(!vm.heap.is_valid(finished), "finished coroutine must be collected");
// Main coroutine remains allocated
assert_eq!(vm.heap.len(), 1, "only main coroutine should remain");
}
#[test]
fn test_coroutines_strict_alternation_with_yield() {
use prometeu_bytecode::FunctionMeta;
// Build function A: PUSH 1; YIELD; FRAME_SYNC; JMP 0 (loop)
let mut fn_a = Vec::new();
fn_a.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_a.extend_from_slice(&1i32.to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
fn_a.extend_from_slice(&0u32.to_le_bytes());
// Build function B: PUSH 2; YIELD; FRAME_SYNC; JMP 0 (loop)
let mut fn_b = Vec::new();
fn_b.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_b.extend_from_slice(&2i32.to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
fn_b.extend_from_slice(&0u32.to_le_bytes());
// Main: SPAWN A; SPAWN B; SLEEP 100; HALT
let mut main = Vec::new();
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&1u32.to_le_bytes()); // fn A idx
main.extend_from_slice(&0u32.to_le_bytes()); // arg count
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&2u32.to_le_bytes()); // fn B idx
main.extend_from_slice(&0u32.to_le_bytes()); // arg count
main.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
main.extend_from_slice(&100u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
// Assemble ROM: [main][A][B]
let off_main = 0usize;
let off_a = main.len();
let off_b = off_a + fn_a.len();
let mut rom = Vec::with_capacity(main.len() + fn_a.len() + fn_b.len());
rom.extend_from_slice(&main);
rom.extend_from_slice(&fn_a);
rom.extend_from_slice(&fn_b);
// VM with three functions (0=main, 1=A, 2=B)
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: off_main as u32,
code_len: main.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_a as u32,
code_len: fn_a.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_b as u32,
code_len: fn_b.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Frame 1: main sleeps; from now on A and B should strictly alternate.
let _ = vm.run_budget(100, &mut native, &mut ctx).unwrap();
// Locate coroutine handles for A (fn_idx=1) and B (fn_idx=2)
let mut a_href = None;
let mut b_href = None;
// Consider currently running coroutine
if let Some(cur) = vm.current_coro {
if let Some(f) = vm.call_stack.last() {
if f.func_idx == 1 {
a_href = Some(cur);
}
if f.func_idx == 2 {
b_href = Some(cur);
}
}
}
// And also consider suspended (Ready/Sleeping) coroutines
for h in vm.heap.suspended_coroutine_handles() {
if let Some(co) = vm.heap.coroutine_data(h) {
if let Some(f) = co.frames.last() {
if f.func_idx == 1 {
a_href = Some(h);
}
if f.func_idx == 2 {
b_href = Some(h);
}
}
}
}
let a_href = a_href.expect("coroutine A not found");
let b_href = b_href.expect("coroutine B not found");
let mut prev_a = vm.heap.coroutine_data(a_href).unwrap().stack.len();
let mut prev_b = vm.heap.coroutine_data(b_href).unwrap().stack.len();
let mut trace = Vec::new();
for _ in 0..6 {
let _ = vm.run_budget(100, &mut native, &mut ctx).unwrap();
let a_now = vm.heap.coroutine_data(a_href).unwrap().stack.len();
let b_now = vm.heap.coroutine_data(b_href).unwrap().stack.len();
if a_now > prev_a {
trace.push(1);
} else if b_now > prev_b {
trace.push(2);
} else {
panic!("no coroutine progress detected this frame");
}
prev_a = a_now;
prev_b = b_now;
}
assert_eq!(trace, vec![1, 2, 1, 2, 1, 2], "Coroutines must strictly alternate under Yield");
}
#[test]
fn test_sleep_does_not_stall_others_and_wakes_at_exact_tick() {
use prometeu_bytecode::FunctionMeta;
// Function A: SLEEP N; PUSH 100; YIELD; FRAME_SYNC; HALT
let sleep_n: u32 = 3;
let mut fn_a = Vec::new();
fn_a.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
fn_a.extend_from_slice(&sleep_n.to_le_bytes());
fn_a.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_a.extend_from_slice(&100i32.to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
// Function B: PUSH 1; YIELD; FRAME_SYNC; JMP 0 (increments every frame)
let mut fn_b = Vec::new();
fn_b.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_b.extend_from_slice(&1i32.to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
fn_b.extend_from_slice(&0u32.to_le_bytes());
// Main: SPAWN A; SPAWN B; SLEEP big; HALT
let mut main = Vec::new();
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&1u32.to_le_bytes());
main.extend_from_slice(&0u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&2u32.to_le_bytes());
main.extend_from_slice(&0u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
main.extend_from_slice(&100u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let off_main = 0usize;
let off_a = main.len();
let off_b = off_a + fn_a.len();
let mut rom = Vec::new();
rom.extend_from_slice(&main);
rom.extend_from_slice(&fn_a);
rom.extend_from_slice(&fn_b);
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: off_main as u32,
code_len: main.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_a as u32,
code_len: fn_a.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_b as u32,
code_len: fn_b.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Frame 1: main sleeps, tick -> 1
let _ = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert_eq!(vm.current_tick, 1);
// Identify A and B coroutine handles (consider both running and suspended)
let mut a_href = None;
let mut b_href = None;
if let Some(cur) = vm.current_coro {
if let Some(f) = vm.call_stack.last() {
if f.func_idx == 1 {
a_href = Some(cur);
}
if f.func_idx == 2 {
b_href = Some(cur);
}
}
}
for h in vm.heap.suspended_coroutine_handles() {
if let Some(co) = vm.heap.coroutine_data(h) {
if let Some(f) = co.frames.last() {
if f.func_idx == 1 {
a_href = Some(h);
}
if f.func_idx == 2 {
b_href = Some(h);
}
}
}
}
let a_href = a_href.expect("A not found");
let b_href = b_href.expect("B not found");
// Count how many frames B runs while A sleeps using the scheduler's next-to-run handle.
// Stop when A is scheduled to run, then execute that frame and record its end-of-frame tick.
let mut ones_before = 0usize;
let mut woke_at_tick = 0u64;
let mut seen_a_once = false;
for _ in 0..1000 {
if let Some(next) = vm.scheduler.current() {
if next == a_href {
if seen_a_once {
// A has slept before and is about to run again (wake). Run and record.
let _ = vm.run_budget(100, &mut native, &mut ctx).unwrap();
woke_at_tick = vm.current_tick;
break;
} else {
// First time A runs (to execute SLEEP N); do not count as wake yet.
seen_a_once = true;
}
} else if next == b_href {
ones_before += 1;
}
}
let _ = vm.run_budget(100, &mut native, &mut ctx).unwrap();
}
// Canonical semantics: wake_tick = current_tick_at_sleep + N + 1.
// The scheduler wakes sleepers at the end of that tick, so the coroutine runs
// in the following frame, and we observe its heap stack update at end tick = wake_tick + 1.
// A executes SLEEP at its first run (tick 1), so wake_tick = 1 + N + 1, observed tick = +1.
let expected_observed_end_tick = 1u64 + sleep_n as u64 + 2u64;
assert_eq!(
woke_at_tick, expected_observed_end_tick,
"A must wake at the exact tick (+1 frame to observe)"
);
// And B must have produced at least N items (one per frame) before A's wake.
assert!(ones_before as u64 >= sleep_n as u64, "B must keep running while A sleeps");
}
#[test]
fn test_multi_coroutine_determinism_across_runs() {
use prometeu_bytecode::FunctionMeta;
// Reuse alternation program from previous test
let mut fn_a = Vec::new();
fn_a.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_a.extend_from_slice(&1i32.to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_a.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
fn_a.extend_from_slice(&0u32.to_le_bytes());
let mut fn_b = Vec::new();
fn_b.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
fn_b.extend_from_slice(&2i32.to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Yield as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_b.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
fn_b.extend_from_slice(&0u32.to_le_bytes());
let mut main = Vec::new();
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&1u32.to_le_bytes());
main.extend_from_slice(&0u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&2u32.to_le_bytes());
main.extend_from_slice(&0u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
main.extend_from_slice(&100u32.to_le_bytes());
main.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let off_main = 0usize;
let off_a = main.len();
let off_b = off_a + fn_a.len();
let mut rom = Vec::new();
rom.extend_from_slice(&main);
rom.extend_from_slice(&fn_a);
rom.extend_from_slice(&fn_b);
let mut vm1 = new_test_vm(rom.clone(), vec![]);
let mut vm2 = new_test_vm(rom.clone(), vec![]);
let fm: std::sync::Arc<[prometeu_bytecode::FunctionMeta]> = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: off_main as u32,
code_len: main.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_a as u32,
code_len: fn_a.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
FunctionMeta {
code_offset: off_b as u32,
code_len: fn_b.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
]);
vm1.program.functions = fm.clone();
vm2.program.functions = fm;
let mut native = MockNative;
let mut c1 = HostContext::new(None);
let mut c2 = HostContext::new(None);
// Burn first frame (main sleeps)
let _ = vm1.run_budget(100, &mut native, &mut c1).unwrap();
let _ = vm2.run_budget(100, &mut native, &mut c2).unwrap();
// Discover A/B handles in both VMs
let find_ab = |vm: &VirtualMachine| {
let mut a = None;
let mut b = None;
// running
if let Some(cur) = vm.current_coro {
if let Some(f) = vm.call_stack.last() {
if f.func_idx == 1 {
a = Some(cur);
}
if f.func_idx == 2 {
b = Some(cur);
}
}
}
// suspended
for h in vm.heap.suspended_coroutine_handles() {
if let Some(co) = vm.heap.coroutine_data(h) {
if let Some(f) = co.frames.last() {
if f.func_idx == 1 {
a = Some(h);
}
if f.func_idx == 2 {
b = Some(h);
}
}
}
}
(a.expect("A missing"), b.expect("B missing"))
};
let (a1, b1) = find_ab(&vm1);
let (a2, b2) = find_ab(&vm2);
let mut a1_prev = vm1.heap.coroutine_data(a1).unwrap().stack.len();
let mut b1_prev = vm1.heap.coroutine_data(b1).unwrap().stack.len();
let mut a2_prev = vm2.heap.coroutine_data(a2).unwrap().stack.len();
let mut b2_prev = vm2.heap.coroutine_data(b2).unwrap().stack.len();
let mut trace1 = Vec::new();
let mut trace2 = Vec::new();
for _ in 0..8 {
let _ = vm1.run_budget(100, &mut native, &mut c1).unwrap();
let a_now = vm1.heap.coroutine_data(a1).unwrap().stack.len();
let b_now = vm1.heap.coroutine_data(b1).unwrap().stack.len();
if a_now > a1_prev {
trace1.push(1);
} else if b_now > b1_prev {
trace1.push(2);
} else {
panic!("no progress 1");
}
a1_prev = a_now;
b1_prev = b_now;
let _ = vm2.run_budget(100, &mut native, &mut c2).unwrap();
let a2_now = vm2.heap.coroutine_data(a2).unwrap().stack.len();
let b2_now = vm2.heap.coroutine_data(b2).unwrap().stack.len();
if a2_now > a2_prev {
trace2.push(1);
} else if b2_now > b2_prev {
trace2.push(2);
} else {
panic!("no progress 2");
}
a2_prev = a2_now;
b2_prev = b2_now;
}
assert_eq!(
trace1, trace2,
"Execution trace (coroutine IDs) must match exactly across runs"
);
}
#[test]
fn test_gc_with_suspended_coroutine_runtime() {
use crate::object::ObjectKind;
use prometeu_bytecode::FunctionMeta;
// Function F (idx 1): SLEEP 10; FRAME_SYNC; HALT
let mut fn_f = Vec::new();
fn_f.extend_from_slice(&(OpCode::Sleep as u16).to_le_bytes());
fn_f.extend_from_slice(&10u32.to_le_bytes());
fn_f.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
fn_f.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
// Main (idx 0): SPAWN F with 1 argument (the HeapRef we preload); FRAME_SYNC; HALT
let mut main = Vec::new();
main.extend_from_slice(&(OpCode::Spawn as u16).to_le_bytes());
main.extend_from_slice(&1u32.to_le_bytes()); // func idx
main.extend_from_slice(&1u32.to_le_bytes()); // arg count = 1
main.extend_from_slice(&(OpCode::FrameSync as u16).to_le_bytes());
main.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let off_main = 0usize;
let off_f = main.len();
let mut rom = Vec::new();
rom.extend_from_slice(&main);
rom.extend_from_slice(&fn_f);
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![
FunctionMeta {
code_offset: off_main as u32,
code_len: main.len() as u32,
param_slots: 0,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
// Function F takes 1 parameter (the HeapRef) which stays on its stack while sleeping
FunctionMeta {
code_offset: off_f as u32,
code_len: fn_f.len() as u32,
param_slots: 1,
local_slots: 0,
return_slots: 0,
max_stack_slots: 8,
},
]);
// Force GC at first safepoint to stress retention
vm.gc_alloc_threshold = 1;
// Allocate a heap object and preload it onto main's operand stack so SPAWN consumes it as arg.
let captured = vm.heap.allocate_object(ObjectKind::Bytes, &[0xAB, 0xCD, 0xEF]);
vm.operand_stack.push(Value::HeapRef(captured));
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// Run main: SPAWN consumes the HeapRef as arg and creates sleeping coroutine; FRAME_SYNC triggers GC
let rep = vm.run_budget(100, &mut native, &mut ctx).unwrap();
assert!(matches!(rep.reason, LogicalFrameEndingReason::FrameSync));
// The captured object must remain alive because it is referenced by the sleeping coroutine's stack
assert!(
vm.heap.is_valid(captured),
"captured object must remain alive while coroutine sleeps"
);
}
#[test]
fn test_make_closure_zero_captures() {
use prometeu_bytecode::{FunctionMeta, Value};
// ROM: MAKE_CLOSURE fn_id=7, cap=0; HALT
let mut rom = Vec::new();
rom.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
rom.extend_from_slice(&7u32.to_le_bytes()); // fn_id
rom.extend_from_slice(&0u32.to_le_bytes()); // capture_count
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
let mut vm = new_test_vm(rom.clone(), vec![]);
vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
code_offset: 0,
code_len: rom.len() as u32,
..Default::default()
}]);
let mut native = MockNative;
let mut ctx = HostContext::new(None);
// step MAKE_CLOSURE
vm.step(&mut native, &mut ctx).unwrap();
// step HALT
vm.step(&mut native, &mut ctx).unwrap();
assert!(vm.halted);
assert_eq!(vm.operand_stack.len(), 1);
let top = vm.peek().unwrap().clone();
let href = match top {
Value::HeapRef(h) => h,
_ => panic!("Expected HeapRef on stack"),
};
assert!(vm.heap.is_valid(href));
assert_eq!(vm.heap.closure_fn_id(href), Some(7));
let env = vm.heap.closure_env_slice(href).expect("env slice");
assert_eq!(env.len(), 0);
}
#[test]
fn test_make_closure_multiple_captures_and_order() {
    use prometeu_bytecode::{FunctionMeta, Value};
    // Program pushes 1, 2, 3 and then captures all three:
    //   PUSH_I32 1; PUSH_I32 2; PUSH_I32 3;   // stack: [1,2,3]
    //   MAKE_CLOSURE fn_id=9 cap=3;           // env must keep push order [1,2,3]
    //   HALT
    let mut code = Vec::new();
    for n in 1i32..=3 {
        code.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
        code.extend_from_slice(&n.to_le_bytes());
    }
    code.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    code.extend_from_slice(&9u32.to_le_bytes()); // fn_id
    code.extend_from_slice(&3u32.to_le_bytes()); // capture_count
    code.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let code_len = code.len() as u32;
    let mut vm = new_test_vm(code, vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len,
        ..Default::default()
    }]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    // Step until HALT flips the flag.
    while !vm.halted {
        vm.step(&mut native, &mut ctx).unwrap();
    }
    // Only the closure reference may remain on the operand stack.
    assert_eq!(vm.operand_stack.len(), 1);
    let href = match vm.pop().unwrap() {
        Value::HeapRef(h) => h,
        _ => panic!("Expected HeapRef"),
    };
    assert_eq!(vm.heap.closure_fn_id(href), Some(9));
    let env = vm.heap.closure_env_slice(href).expect("env slice");
    // Captures are stored in original push order, not pop order.
    assert_eq!(env.len(), 3);
    assert_eq!(env, &[Value::Int32(1), Value::Int32(2), Value::Int32(3)]);
}
#[test]
fn test_call_closure_returns_constant() {
    use prometeu_bytecode::{FunctionMeta, Value};
    // Layout:
    //   F0 (entry): MAKE_CLOSURE fn=1 cap=0; CALL_CLOSURE argc=0; HALT
    //   F1 (callee): PUSH_I32 7; RET
    // Emits a 16-bit little-endian opcode into the ROM buffer.
    fn emit_op(rom: &mut Vec<u8>, opcode: u16) {
        rom.extend_from_slice(&opcode.to_le_bytes());
    }
    let mut rom = Vec::new();
    emit_op(&mut rom, OpCode::MakeClosure as u16);
    rom.extend_from_slice(&1u32.to_le_bytes()); // fn_id
    rom.extend_from_slice(&0u32.to_le_bytes()); // capture_count
    emit_op(&mut rom, OpCode::CallClosure as u16);
    rom.extend_from_slice(&0u32.to_le_bytes()); // argc = 0 user args
    emit_op(&mut rom, OpCode::Halt as u16);
    let f0_len = rom.len() as u32;
    // F1 code follows F0 directly.
    let f1_start = rom.len() as u32;
    emit_op(&mut rom, OpCode::PushI32 as u16);
    rom.extend_from_slice(&7i32.to_le_bytes());
    emit_op(&mut rom, OpCode::Ret as u16);
    let f1_len = rom.len() as u32 - f1_start;
    let mut vm = new_test_vm(rom, vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: 0,
            code_len: f0_len,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 1,
            return_slots: 1,
            ..Default::default()
        },
    ]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // The callee's return value is the only thing left on the stack.
    assert_eq!(vm.operand_stack.len(), 1);
    assert_eq!(vm.operand_stack[0], Value::Int32(7));
}
#[test]
fn test_call_closure_with_captures_ignored() {
    use prometeu_bytecode::{FunctionMeta, Value};
    // F0 pushes 123, captures it into a closure over F1, then calls it with
    // zero user args; F1 never touches its env and just returns 42.
    //   F0: PUSH_I32 123; MAKE_CLOSURE fn=1 cap=1; CALL_CLOSURE 0; HALT
    //   F1: PUSH_I32 42; RET
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&123i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes()); // fn_id
    rom.extend_from_slice(&1u32.to_le_bytes()); // capture_count
    rom.extend_from_slice(&(OpCode::CallClosure as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes()); // argc = 0
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let entry_len = rom.len() as u32;
    let callee_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let callee_len = rom.len() as u32 - callee_start;
    let mut vm = new_test_vm(rom, vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: 0,
            code_len: entry_len,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: callee_start,
            code_len: callee_len,
            param_slots: 1,
            return_slots: 1,
            ..Default::default()
        },
    ]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack, vec![Value::Int32(42)]);
}
#[test]
fn test_call_closure_on_non_closure_traps() {
    use prometeu_bytecode::FunctionMeta;
    // F0: PUSH_I32 1; CALL_CLOSURE 0; HALT — CALL_CLOSURE on a plain i32
    // must raise TRAP_TYPE before HALT is ever reached.
    let mut code = Vec::new();
    code.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    code.extend_from_slice(&1i32.to_le_bytes());
    code.extend_from_slice(&(OpCode::CallClosure as u16).to_le_bytes());
    code.extend_from_slice(&0u32.to_le_bytes());
    code.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let code_len = code.len() as u32;
    let mut vm = new_test_vm(code, vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len,
        ..Default::default()
    }]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(10, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(info) => {
            // The trap must identify both the type error and the opcode.
            assert_eq!(info.code, TRAP_TYPE);
            assert_eq!(info.opcode, OpCode::CallClosure as u16);
        }
        other => {
            panic!("Expected Trap(TYPE) from CALL_CLOSURE on non-closure, got {:?}", other)
        }
    }
}
#[test]
fn test_nested_call_closure() {
    use prometeu_bytecode::{FunctionMeta, Value};
    // A closure that returns another closure:
    //   F0: MAKE_CLOSURE fn=1 cap=0; CALL_CLOSURE 0; CALL_CLOSURE 0; HALT
    //   F1: MAKE_CLOSURE fn=2 cap=0; RET   // yields a closure over F2
    //   F2: PUSH_I32 55; RET               // yields the constant 55
    let mut rom = Vec::new();
    // F0 (entry)
    rom.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes()); // fn_id = 1
    rom.extend_from_slice(&0u32.to_le_bytes()); // cap = 0
    rom.extend_from_slice(&(OpCode::CallClosure as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes()); // first call -> closure over F2
    rom.extend_from_slice(&(OpCode::CallClosure as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes()); // second call -> 55
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() as u32;
    // F1: builds and returns a closure over F2.
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::MakeClosure as u16).to_le_bytes());
    rom.extend_from_slice(&2u32.to_le_bytes()); // fn_id = 2
    rom.extend_from_slice(&0u32.to_le_bytes()); // cap = 0
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;
    // F2: returns the constant 55.
    let f2_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&55i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f2_len = rom.len() as u32 - f2_start;
    let mut vm = new_test_vm(rom, vec![]);
    // F1 and F2 share the same meta shape: param_slots = 1, return_slots = 1.
    let callee_meta = |code_offset: u32, code_len: u32| FunctionMeta {
        code_offset,
        code_len,
        param_slots: 1,
        return_slots: 1,
        ..Default::default()
    };
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: 0,
            code_len: f0_len,
            ..Default::default()
        },
        callee_meta(f1_start, f1_len),
        callee_meta(f2_start, f2_len),
    ]);
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");
    let report = vm.run_budget(200, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // Both calls completed: only F2's constant remains.
    assert_eq!(vm.operand_stack, vec![Value::Int32(55)]);
}
}