use crate::call_frame::CallFrame; use crate::scope_frame::ScopeFrame; use crate::Value; use crate::{HostContext, NativeInterface, ProgramImage, VmInitError}; use prometeu_bytecode::abi::{TrapInfo, TRAP_BAD_RET_SLOTS, TRAP_DIV_ZERO, TRAP_INVALID_FUNC, TRAP_OOB, TRAP_TYPE}; use prometeu_bytecode::opcode::OpCode; /// Reason why the Virtual Machine stopped execution during a specific run. /// This allows the system to decide if it should continue execution in the next tick /// or if the frame is finalized. #[derive(Debug, Clone, PartialEq, Eq)] pub enum LogicalFrameEndingReason { /// Execution reached a `FRAME_SYNC` instruction, marking the end of the logical frame. FrameSync, /// The cycle budget for the current host tick was exhausted before reaching `FRAME_SYNC`. BudgetExhausted, /// A `HALT` instruction was executed, terminating the program. Halted, /// The Program Counter (PC) reached the end of the available bytecode. EndOfRom, /// Execution hit a registered breakpoint. Breakpoint, /// A runtime trap occurred (e.g., OOB, invalid gate). Trap(TrapInfo), /// A fatal error occurred that cannot be recovered (e.g., stack underflow). Panic(String), } pub enum OpError { Trap(u32, String), Panic(String), } impl From for LogicalFrameEndingReason { fn from(info: TrapInfo) -> Self { LogicalFrameEndingReason::Trap(info) } } /// A report detailing the results of an execution slice (run_budget). #[derive(Debug, Clone, PartialEq, Eq)] pub struct BudgetReport { /// Total virtual cycles consumed during this run. pub cycles_used: u64, /// Number of VM instructions executed. pub steps_executed: u32, /// The reason why this execution slice ended. pub reason: LogicalFrameEndingReason, } /// The PVM (PROMETEU Virtual Machine). /// /// A deterministic, stack-based virtual machine designed for game logic and /// educational simulation. The PVM executes bytecode compiled from TypeScript/JS /// and interacts with virtual hardware through a specialized instruction set. 
/// /// ### Architecture Highlights: /// - **Stack-Based**: Most operations pop values from the stack and push results back. /// - **Deterministic**: Execution is cycle-aware, allowing for precise performance budgeting. /// - **Sandboxed**: No direct access to the host system; all I/O goes through Syscalls. /// - **Type-Aware**: Supports integers, floats, booleans, and strings with automatic promotion. /// /// ### Memory Regions: /// - **ROM**: Immutable instruction storage. /// - **Operand Stack**: Fast-access temporary storage for calculations. /// - **Global Pool**: Persistent storage for cross-frame variables. /// - **Heap**: Dynamic memory for complex data (simplified version). pub struct VirtualMachine { /// Program Counter (PC): The absolute byte offset in ROM for the next instruction. pub pc: usize, /// Operand Stack: The primary workspace for all mathematical and logical operations. pub operand_stack: Vec, /// Call Stack: Manages function call context (return addresses, frame limits). pub call_stack: Vec, /// Scope Stack: Handles block-level local variable visibility (scopes). pub scope_stack: Vec, /// Global Variable Store: Variables that persist for the lifetime of the program. pub globals: Vec, /// The loaded executable (Bytecode + Constant Pool), that is the ROM translated. pub program: ProgramImage, /// Heap Memory: Dynamic allocation pool. pub heap: Vec, /// Total virtual cycles consumed since the VM started. pub cycles: u64, /// Stop flag: true if a `HALT` opcode was encountered. pub halted: bool, /// Set of ROM addresses used for software breakpoints in the debugger. pub breakpoints: std::collections::HashSet, } impl VirtualMachine { /// Creates a new VM instance with the provided bytecode and constants. 
pub fn new(rom: Vec<u8>, constant_pool: Vec<Value>) -> Self {
        // FIX: parameter types were stripped in the original (`Vec` with no
        // parameter); restored as `Vec<u8>` (ROM bytes) and `Vec<Value>` (constants).
        Self {
            pc: 0,
            operand_stack: Vec::new(),
            call_stack: Vec::new(),
            scope_stack: Vec::new(),
            globals: Vec::new(),
            program: ProgramImage::new(rom, constant_pool, vec![], None, std::collections::HashMap::new()),
            heap: Vec::new(),
            cycles: 0,
            halted: false,
            breakpoints: std::collections::HashSet::new(),
        }
    }

    /// Resets the VM state and loads a new program.
    /// This is typically called by the Firmware when starting a new App/Cartridge.
    ///
    /// # Errors
    /// Returns `VmInitError` when the image is not PBS v0, fails to load/verify,
    /// or the requested entrypoint cannot be resolved. On error the VM is left
    /// halted with an empty program image.
    pub fn initialize(&mut self, program_bytes: Vec<u8>, entrypoint: &str) -> Result<(), VmInitError> {
        // Fail fast: reset state upfront. If we return early with an error,
        // the VM is left in a "halted and empty" state.
        self.program = ProgramImage::default();
        self.pc = 0;
        self.operand_stack.clear();
        self.call_stack.clear();
        self.scope_stack.clear();
        self.globals.clear();
        self.heap.clear();
        self.cycles = 0;
        self.halted = true; // execution is impossible until successful load

        // Only recognized format is loadable: PBS v0 industrial format
        let program = if program_bytes.starts_with(b"PBS\0") {
            match prometeu_bytecode::BytecodeLoader::load(&program_bytes) {
                Ok(module) => {
                    // Run verifier on the module; it also computes the true
                    // max operand-stack depth per function.
                    let max_stacks = crate::verifier::Verifier::verify(&module.code, &module.functions)
                        .map_err(VmInitError::VerificationFailed)?;
                    let mut program = ProgramImage::from(module);
                    let mut functions = program.functions.as_ref().to_vec();
                    for (func, max_stack) in functions.iter_mut().zip(max_stacks) {
                        func.max_stack_slots = max_stack;
                    }
                    program.functions = std::sync::Arc::from(functions);
                    program
                }
                Err(prometeu_bytecode::LoadError::InvalidVersion) => return Err(VmInitError::UnsupportedFormat),
                Err(e) => {
                    return Err(VmInitError::PbsV0LoadFailed(e));
                }
            }
        } else {
            return Err(VmInitError::InvalidFormat);
        };

        // Resolve the entrypoint: empty (defaults to func 0), numeric func_idx, or symbol name.
        let pc = if entrypoint.is_empty() {
            program.functions.get(0).map(|f| f.code_offset as usize).unwrap_or(0)
        } else if let Ok(func_idx) = entrypoint.parse::<usize>() {
            program.functions.get(func_idx)
                .map(|f| f.code_offset as usize)
                .ok_or(VmInitError::EntrypointNotFound)?
        } else {
            // Try to resolve as a symbol name from the exports map
            if let Some(&func_idx) = program.exports.get(entrypoint) {
                program.functions.get(func_idx as usize)
                    .map(|f| f.code_offset as usize)
                    .ok_or(VmInitError::EntrypointNotFound)?
            } else {
                return Err(VmInitError::EntrypointNotFound);
            }
        };

        // Finalize initialization by applying the new program and PC.
        self.program = program;
        self.pc = pc;
        self.halted = false; // Successfully loaded, execution is now possible
        Ok(())
    }

    /// Prepares the VM to execute a specific entrypoint by setting the PC and
    /// pushing an initial call frame.
    pub fn prepare_call(&mut self, entrypoint: &str) {
        let func_idx = if let Ok(idx) = entrypoint.parse::<usize>() {
            idx
        } else {
            // Try to resolve as a symbol name.
            // FIX: simplified the original `.ok_or(()).unwrap_or(0)` — the
            // intermediate Result added nothing; behavior (default to 0) unchanged.
            self.program.exports.get(entrypoint)
                .map(|&idx| idx as usize)
                .unwrap_or(0)
        };
        let callee = self.program.functions.get(func_idx).cloned().unwrap_or_default();
        let addr = callee.code_offset as usize;
        self.pc = addr;
        self.halted = false;
        // Pushing a sentinel frame so RET works at the top level.
        // The return address is set to the end of ROM, which will naturally
        // cause the VM to stop after returning from the entrypoint.
        self.operand_stack.clear();
        self.call_stack.clear();
        self.scope_stack.clear();
        // Entrypoint also needs locals allocated.
        // For the sentinel frame, stack_base is always 0.
        if let Some(func) = self.program.functions.get(func_idx) {
            let total_slots = func.param_slots as u32 + func.local_slots as u32;
            for _ in 0..total_slots {
                self.operand_stack.push(Value::Null);
            }
        }
        self.call_stack.push(CallFrame {
            return_pc: self.program.rom.len() as u32,
            stack_base: 0,
            func_idx,
        });
    }
}

impl Default for VirtualMachine {
    fn default() -> Self {
        Self::new(vec![], vec![])
    }
}

impl VirtualMachine {
    /// Executes the VM for a limited number of cycles (budget).
    ///
    /// This is the heart of the deterministic execution model. Instead of running
    /// indefinitely, the VM runs until it consumes its allocated budget or reaches
    /// a synchronization point (`FRAME_SYNC`).
    ///
    /// # Arguments
    /// * `budget` - Maximum number of cycles allowed for this execution slice.
    /// * `native` - Interface for handling syscalls (Firmware/OS).
    /// * `ctx` - Access to virtual hardware peripherals.
    // FIX: the return type parameters were stripped in the original (`-> Result`).
    // Restored as Result<BudgetReport, String>: the body returns Ok(BudgetReport)
    // and the only `?` sources (peek_u16, OpCode::try_from) yield String-convertible
    // errors — TODO confirm the error type against the original signature.
    pub fn run_budget(
        &mut self,
        budget: u64,
        native: &mut dyn NativeInterface,
        ctx: &mut HostContext,
    ) -> Result<BudgetReport, String> {
        let start_cycles = self.cycles;
        let mut steps_executed = 0;
        // FIX: restored stripped type parameter (`Option = None` in the original).
        let mut ending_reason: Option<LogicalFrameEndingReason> = None;
        while (self.cycles - start_cycles) < budget && !self.halted && self.pc < self.program.rom.len() {
            // Debugger support: stop before executing an instruction if there's a breakpoint.
            // Note: we skip the check for the very first step of a slice to avoid
            // getting stuck on the same breakpoint repeatedly.
            if steps_executed > 0 && self.breakpoints.contains(&self.pc) {
                ending_reason = Some(LogicalFrameEndingReason::Breakpoint);
                break;
            }
            let pc_before = self.pc;
            let cycles_before = self.cycles;
            // Fast-path for FRAME_SYNC:
            // This instruction is special because it marks the end of a logical game frame.
            // We peek ahead to handle it efficiently.
let opcode_val = self.peek_u16()?; let opcode = OpCode::try_from(opcode_val)?; if opcode == OpCode::FrameSync { self.pc += 2; // Advance PC past the opcode self.cycles += OpCode::FrameSync.cycles(); steps_executed += 1; ending_reason = Some(LogicalFrameEndingReason::FrameSync); break; } if opcode == OpCode::Trap { self.pc += 2; // Advance PC past the opcode self.cycles += OpCode::Trap.cycles(); steps_executed += 1; ending_reason = Some(LogicalFrameEndingReason::Breakpoint); break; } // Execute a single step (Fetch-Decode-Execute) if let Err(reason) = self.step(native, ctx) { ending_reason = Some(reason); break; } steps_executed += 1; // Integrity check: ensure real progress is being made to avoid infinite loops // caused by zero-cycle instructions or stuck PC. if self.pc == pc_before && self.cycles == cycles_before && !self.halted { ending_reason = Some(LogicalFrameEndingReason::Panic(format!("VM stuck at PC 0x{:08X}", self.pc))); break; } } // Determine why we stopped if no explicit reason (FrameSync/Breakpoint) was set. if ending_reason.is_none() { if self.halted { ending_reason = Some(LogicalFrameEndingReason::Halted); } else if self.pc >= self.program.rom.len() { ending_reason = Some(LogicalFrameEndingReason::EndOfRom); } else { ending_reason = Some(LogicalFrameEndingReason::BudgetExhausted); } } Ok(BudgetReport { cycles_used: self.cycles - start_cycles, steps_executed, reason: ending_reason.unwrap(), }) } /// Peeks at the next 16-bit value in the ROM without advancing the PC. fn peek_u16(&self) -> Result { if self.pc + 2 > self.program.rom.len() { return Err("Unexpected end of ROM".into()); } let bytes = [ self.program.rom[self.pc], self.program.rom[self.pc + 1], ]; Ok(u16::from_le_bytes(bytes)) } /// Executes a single instruction at the current Program Counter (PC). /// /// This follows the classic CPU cycle: /// 1. Fetch: Read the opcode from memory. /// 2. Decode: Identify what operation to perform. /// 3. 
Execute: Perform the operation, updating stacks, memory, or calling peripherals. pub fn step(&mut self, native: &mut dyn NativeInterface, ctx: &mut HostContext) -> Result<(), LogicalFrameEndingReason> { self.step_impl(native, ctx) } fn step_impl(&mut self, native: &mut dyn NativeInterface, ctx: &mut HostContext) -> Result<(), LogicalFrameEndingReason> { if self.halted || self.pc >= self.program.rom.len() { return Ok(()); } let start_pc = self.pc; // Fetch & Decode let instr = crate::bytecode::decoder::decode_at(&self.program.rom, self.pc) .map_err(|e| LogicalFrameEndingReason::Panic(format!("{:?}", e)))?; let opcode = instr.opcode; self.pc = instr.next_pc; // Execute match opcode { OpCode::Nop => {} OpCode::Halt => { self.halted = true; } OpCode::Jmp => { let target = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let func_start = self.call_stack.last().map(|f| self.program.functions[f.func_idx].code_offset as usize).unwrap_or(0); self.pc = func_start + target; } OpCode::JmpIfFalse => { let target = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; match val { Value::Boolean(false) => { let func_start = self.call_stack.last().map(|f| self.program.functions[f.func_idx].code_offset as usize).unwrap_or(0); self.pc = func_start + target; } Value::Boolean(true) => {} _ => { return Err(self.trap(TRAP_TYPE, opcode as u16, format!("Expected boolean for JMP_IF_FALSE, got {:?}", val), start_pc as u32)); } } } OpCode::JmpIfTrue => { let target = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; match val { Value::Boolean(true) => { let func_start = self.call_stack.last().map(|f| self.program.functions[f.func_idx].code_offset as usize).unwrap_or(0); self.pc = func_start + target; } Value::Boolean(false) => {} _ => { return Err(self.trap(TRAP_TYPE, opcode as u16, format!("Expected boolean 
for JMP_IF_TRUE, got {:?}", val), start_pc as u32)); } } } OpCode::Trap => { // Handled in run_budget for interruption } OpCode::PushConst => { let idx = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.program.constant_pool.get(idx).cloned().ok_or_else(|| LogicalFrameEndingReason::Panic("Invalid constant index".into()))?; self.push(val); } OpCode::PushI64 => { let val = i64::from_le_bytes(instr.imm[0..8].try_into().unwrap()); self.push(Value::Int64(val)); } OpCode::PushI32 => { let val = i32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); self.push(Value::Int32(val)); } OpCode::PushBounded => { let val = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); if val > 0xFFFF { return Err(self.trap(TRAP_OOB, opcode as u16, format!("Bounded value overflow: {} > 0xFFFF", val), start_pc as u32)); } self.push(Value::Bounded(val)); } OpCode::PushF64 => { let val = f64::from_le_bytes(instr.imm[0..8].try_into().unwrap()); self.push(Value::Float(val)); } OpCode::PushBool => { let val = instr.imm[0]; self.push(Value::Boolean(val != 0)); } OpCode::Pop => { self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; } OpCode::PopN => { let n = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); for _ in 0..n { self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; } } OpCode::Dup => { let val = self.peek().map_err(|e| LogicalFrameEndingReason::Panic(e))?.clone(); self.push(val); } OpCode::Swap => { let a = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; let b = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; self.push(a); self.push(b); } OpCode::Add => self.binary_op(opcode, start_pc as u32, |a, b| match (&a, &b) { (Value::String(_), _) | (_, Value::String(_)) => { Ok(Value::String(format!("{}{}", a.to_string(), b.to_string()))) } (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_add(*b))), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_add(*b))), (Value::Int32(a), 
Value::Int64(b)) => Ok(Value::Int64((*a as i64).wrapping_add(*b))), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_add(*b as i64))), (Value::Float(a), Value::Float(b)) => Ok(Value::Float(a + b)), (Value::Int32(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)), (Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a + *b as f64)), (Value::Int64(a), Value::Float(b)) => Ok(Value::Float(*a as f64 + b)), (Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a + *b as f64)), (Value::Bounded(a), Value::Bounded(b)) => { let res = a.saturating_add(*b); if res > 0xFFFF { Err(OpError::Trap(TRAP_OOB, format!("Bounded addition overflow: {} + {} = {}", a, b, res))) } else { Ok(Value::Bounded(res)) } } _ => Err(OpError::Panic("Invalid types for ADD".into())), })?, OpCode::Sub => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_sub(b))), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_sub(b))), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_sub(b))), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_sub(b as i64))), (Value::Float(a), Value::Float(b)) => Ok(Value::Float(a - b)), (Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)), (Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a - b as f64)), (Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 - b)), (Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a - b as f64)), (Value::Bounded(a), Value::Bounded(b)) => { if a < b { Err(OpError::Trap(TRAP_OOB, format!("Bounded subtraction underflow: {} - {} < 0", a, b))) } else { Ok(Value::Bounded(a - b)) } } _ => Err(OpError::Panic("Invalid types for SUB".into())), })?, OpCode::Mul => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_mul(b))), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_mul(b))), 
(Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_mul(b))), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_mul(b as i64))), (Value::Float(a), Value::Float(b)) => Ok(Value::Float(a * b)), (Value::Int32(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)), (Value::Float(a), Value::Int32(b)) => Ok(Value::Float(a * b as f64)), (Value::Int64(a), Value::Float(b)) => Ok(Value::Float(a as f64 * b)), (Value::Float(a), Value::Int64(b)) => Ok(Value::Float(a * b as f64)), (Value::Bounded(a), Value::Bounded(b)) => { let res = a as u64 * b as u64; if res > 0xFFFF { Err(OpError::Trap(TRAP_OOB, format!("Bounded multiplication overflow: {} * {} = {}", a, b, res))) } else { Ok(Value::Bounded(res as u32)) } } _ => Err(OpError::Panic("Invalid types for MUL".into())), })?, OpCode::Div => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer division by zero".into())); } Ok(Value::Int32(a / b)) } (Value::Int64(a), Value::Int64(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer division by zero".into())); } Ok(Value::Int64(a / b)) } (Value::Int32(a), Value::Int64(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer division by zero".into())); } Ok(Value::Int64(a as i64 / b)) } (Value::Int64(a), Value::Int32(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer division by zero".into())); } Ok(Value::Int64(a / b as i64)) } (Value::Float(a), Value::Float(b)) => { if b == 0.0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into())); } Ok(Value::Float(a / b)) } (Value::Int32(a), Value::Float(b)) => { if b == 0.0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into())); } Ok(Value::Float(a as f64 / b)) } (Value::Float(a), Value::Int32(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into())); } Ok(Value::Float(a / b 
as f64)) } (Value::Int64(a), Value::Float(b)) => { if b == 0.0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into())); } Ok(Value::Float(a as f64 / b)) } (Value::Float(a), Value::Int64(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Float division by zero".into())); } Ok(Value::Float(a / b as f64)) } (Value::Bounded(a), Value::Bounded(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Bounded division by zero".into())); } Ok(Value::Bounded(a / b)) } _ => Err(OpError::Panic("Invalid types for DIV".into())), })?, OpCode::Mod => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into())); } Ok(Value::Int32(a % b)) } (Value::Int64(a), Value::Int64(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Integer modulo by zero".into())); } Ok(Value::Int64(a % b)) } (Value::Bounded(a), Value::Bounded(b)) => { if b == 0 { return Err(OpError::Trap(TRAP_DIV_ZERO, "Bounded modulo by zero".into())); } Ok(Value::Bounded(a % b)) } _ => Err(OpError::Panic("Invalid types for MOD".into())), })?, OpCode::BoundToInt => { let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; if let Value::Bounded(b) = val { self.push(Value::Int64(b as i64)); } else { return Err(LogicalFrameEndingReason::Panic("Expected bounded for BOUND_TO_INT".into())); } } OpCode::IntToBoundChecked => { let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; let int_val = val.as_integer().ok_or_else(|| LogicalFrameEndingReason::Panic("Expected integer for INT_TO_BOUND_CHECKED".into()))?; if int_val < 0 || int_val > 0xFFFF { return Err(self.trap(TRAP_OOB, OpCode::IntToBoundChecked as u16, format!("Integer to bounded conversion out of range: {}", int_val), start_pc as u32)); } self.push(Value::Bounded(int_val as u32)); } OpCode::Eq => self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a == b)))?, 
OpCode::Neq => self.binary_op(opcode, start_pc as u32, |a, b| Ok(Value::Boolean(a != b)))?, OpCode::Lt => self.binary_op(opcode, start_pc as u32, |a, b| { a.partial_cmp(&b) .map(|o| Value::Boolean(o == std::cmp::Ordering::Less)) .ok_or_else(|| OpError::Panic("Invalid types for LT".into())) })?, OpCode::Gt => self.binary_op(opcode, start_pc as u32, |a, b| { a.partial_cmp(&b) .map(|o| Value::Boolean(o == std::cmp::Ordering::Greater)) .ok_or_else(|| OpError::Panic("Invalid types for GT".into())) })?, OpCode::Lte => self.binary_op(opcode, start_pc as u32, |a, b| { a.partial_cmp(&b) .map(|o| Value::Boolean(o != std::cmp::Ordering::Greater)) .ok_or_else(|| OpError::Panic("Invalid types for LTE".into())) })?, OpCode::Gte => self.binary_op(opcode, start_pc as u32, |a, b| { a.partial_cmp(&b) .map(|o| Value::Boolean(o != std::cmp::Ordering::Less)) .ok_or_else(|| OpError::Panic("Invalid types for GTE".into())) })?, OpCode::And => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a && b)), _ => Err(OpError::Panic("Invalid types for AND".into())), })?, OpCode::Or => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Boolean(a), Value::Boolean(b)) => Ok(Value::Boolean(a || b)), _ => Err(OpError::Panic("Invalid types for OR".into())), })?, OpCode::Not => { let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; if let Value::Boolean(b) = val { self.push(Value::Boolean(!b)); } else { return Err(LogicalFrameEndingReason::Panic("Invalid type for NOT".into())); } } OpCode::BitAnd => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a & b)), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a & b)), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) & b)), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a & (b as i64))), _ => Err(OpError::Panic("Invalid types for BitAnd".into())), })?, 
OpCode::BitOr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a | b)), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a | b)), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) | b)), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a | (b as i64))), _ => Err(OpError::Panic("Invalid types for BitOr".into())), })?, OpCode::BitXor => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a ^ b)), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a ^ b)), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64) ^ b)), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a ^ (b as i64))), _ => Err(OpError::Panic("Invalid types for BitXor".into())), })?, OpCode::Shl => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shl(b as u32))), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_shl(b as u32))), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shl(b as u32))), _ => Err(OpError::Panic("Invalid types for Shl".into())), })?, OpCode::Shr => self.binary_op(opcode, start_pc as u32, |a, b| match (a, b) { (Value::Int32(a), Value::Int32(b)) => Ok(Value::Int32(a.wrapping_shr(b as u32))), (Value::Int64(a), Value::Int64(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))), (Value::Int32(a), Value::Int64(b)) => Ok(Value::Int64((a as i64).wrapping_shr(b as u32))), (Value::Int64(a), Value::Int32(b)) => Ok(Value::Int64(a.wrapping_shr(b as u32))), _ => Err(OpError::Panic("Invalid types for Shr".into())), })?, OpCode::Neg => { let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; match val { Value::Int32(a) => self.push(Value::Int32(a.wrapping_neg())), Value::Int64(a) => 
self.push(Value::Int64(a.wrapping_neg())), Value::Float(a) => self.push(Value::Float(-a)), _ => return Err(LogicalFrameEndingReason::Panic("Invalid type for Neg".into())), } } OpCode::GetGlobal => { let idx = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.globals.get(idx).cloned().ok_or_else(|| LogicalFrameEndingReason::Panic("Invalid global index".into()))?; self.push(val); } OpCode::SetGlobal => { let idx = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; if idx >= self.globals.len() { self.globals.resize(idx + 1, Value::Null); } self.globals[idx] = val; } OpCode::GetLocal => { let slot = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); let frame = self.call_stack.last().ok_or_else(|| LogicalFrameEndingReason::Panic("No active call frame".into()))?; let func = &self.program.functions[frame.func_idx]; crate::local_addressing::check_local_slot(func, slot, opcode as u16, start_pc as u32) .map_err(|trap_info| self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc))?; let stack_idx = crate::local_addressing::local_index(frame, slot); let val = self.operand_stack.get(stack_idx).cloned().ok_or_else(|| LogicalFrameEndingReason::Panic("Internal error: validated local slot not found in stack".into()))?; self.push(val); } OpCode::SetLocal => { let slot = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; let frame = self.call_stack.last().ok_or_else(|| LogicalFrameEndingReason::Panic("No active call frame".into()))?; let func = &self.program.functions[frame.func_idx]; crate::local_addressing::check_local_slot(func, slot, opcode as u16, start_pc as u32) .map_err(|trap_info| self.trap(trap_info.code, trap_info.opcode, trap_info.message, trap_info.pc))?; let stack_idx = crate::local_addressing::local_index(frame, slot); self.operand_stack[stack_idx] = 
val; } OpCode::Call => { let func_id = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let callee = self.program.functions.get(func_id).ok_or_else(|| { self.trap(TRAP_INVALID_FUNC, opcode as u16, format!("Invalid func_id {}", func_id), start_pc as u32) })?; if self.operand_stack.len() < callee.param_slots as usize { return Err(LogicalFrameEndingReason::Panic(format!( "Stack underflow during CALL to func {}: expected at least {} arguments, got {}", func_id, callee.param_slots, self.operand_stack.len() ))); } let stack_base = self.operand_stack.len() - callee.param_slots as usize; // Allocate and zero-init local_slots for _ in 0..callee.local_slots { self.operand_stack.push(Value::Null); } self.call_stack.push(CallFrame { return_pc: self.pc as u32, stack_base, func_idx: func_id, }); self.pc = callee.code_offset as usize; } OpCode::Ret => { let frame = self.call_stack.pop().ok_or_else(|| LogicalFrameEndingReason::Panic("Call stack underflow".into()))?; let func = &self.program.functions[frame.func_idx]; let return_slots = func.return_slots as usize; let current_height = self.operand_stack.len(); let expected_height = frame.stack_base + func.param_slots as usize + func.local_slots as usize + return_slots; if current_height != expected_height { return Err(self.trap(TRAP_BAD_RET_SLOTS, opcode as u16, format!( "Incorrect stack height at RET in func {}: expected {} slots (stack_base={} + params={} + locals={} + returns={}), got {}", frame.func_idx, expected_height, frame.stack_base, func.param_slots, func.local_slots, return_slots, current_height ), start_pc as u32)); } // Copy return values (preserving order: pop return_slots values, then reverse to push back) let mut return_vals = Vec::with_capacity(return_slots); for _ in 0..return_slots { return_vals.push(self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?); } return_vals.reverse(); self.operand_stack.truncate(frame.stack_base); for val in return_vals { self.push(val); } self.pc = 
frame.return_pc as usize; } OpCode::PushScope => { self.scope_stack.push(ScopeFrame { scope_stack_base: self.operand_stack.len(), }); } OpCode::PopScope => { let frame = self.scope_stack.pop().ok_or_else(|| LogicalFrameEndingReason::Panic("Scope stack underflow".into()))?; self.operand_stack.truncate(frame.scope_stack_base); } OpCode::Alloc => { let _type_id = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); let slots = u32::from_le_bytes(instr.imm[4..8].try_into().unwrap()) as usize; let ref_idx = self.heap.len(); for _ in 0..slots { self.heap.push(Value::Null); } self.push(Value::Gate(ref_idx)); } OpCode::GateLoad => { let offset = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let ref_val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; if let Value::Gate(base) = ref_val { let val = self.heap.get(base + offset).cloned().ok_or_else(|| { self.trap(prometeu_bytecode::abi::TRAP_OOB, OpCode::GateLoad as u16, format!("Out-of-bounds heap access at offset {}", offset), start_pc as u32) })?; self.push(val); } else { return Err(self.trap(prometeu_bytecode::abi::TRAP_TYPE, OpCode::GateLoad as u16, "Expected gate handle for GATE_LOAD".to_string(), start_pc as u32)); } } OpCode::GateStore => { let offset = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()) as usize; let val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; let ref_val = self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; if let Value::Gate(base) = ref_val { if base + offset >= self.heap.len() { return Err(self.trap(prometeu_bytecode::abi::TRAP_OOB, OpCode::GateStore as u16, format!("Out-of-bounds heap access at offset {}", offset), start_pc as u32)); } self.heap[base + offset] = val; } else { return Err(self.trap(prometeu_bytecode::abi::TRAP_TYPE, OpCode::GateStore as u16, "Expected gate handle for GATE_STORE".to_string(), start_pc as u32)); } } OpCode::GateBeginPeek | OpCode::GateEndPeek | OpCode::GateBeginBorrow | OpCode::GateEndBorrow | 
OpCode::GateBeginMutate | OpCode::GateEndMutate | OpCode::GateRetain => { } OpCode::GateRelease => { self.pop().map_err(|e| LogicalFrameEndingReason::Panic(e))?; } OpCode::Syscall => { let pc_at_syscall = start_pc as u32; let id = u32::from_le_bytes(instr.imm[0..4].try_into().unwrap()); let syscall = prometeu_abi::syscalls::Syscall::from_u32(id).ok_or_else(|| { self.trap(prometeu_bytecode::abi::TRAP_INVALID_SYSCALL, OpCode::Syscall as u16, format!("Unknown syscall: 0x{:08X}", id), pc_at_syscall) })?; let args_count = syscall.args_count(); let mut args = Vec::with_capacity(args_count); for _ in 0..args_count { let v = self.pop().map_err(|_e| { self.trap(prometeu_bytecode::abi::TRAP_STACK_UNDERFLOW, OpCode::Syscall as u16, "Syscall argument stack underflow".to_string(), pc_at_syscall) })?; args.push(v); } args.reverse(); let stack_height_before = self.operand_stack.len(); let mut ret = crate::HostReturn::new(&mut self.operand_stack); native.syscall(id, &args, &mut ret, ctx).map_err(|fault| match fault { crate::VmFault::Trap(code, msg) => self.trap(code, OpCode::Syscall as u16, msg, pc_at_syscall), crate::VmFault::Panic(msg) => LogicalFrameEndingReason::Panic(msg), crate::VmFault::Unavailable => LogicalFrameEndingReason::Panic("Host feature unavailable".into()), })?; let stack_height_after = self.operand_stack.len(); let results_pushed = stack_height_after - stack_height_before; if results_pushed != syscall.results_count() { return Err(LogicalFrameEndingReason::Panic(format!( "Syscall {} (0x{:08X}) results mismatch: expected {}, got {}", syscall.name(), id, syscall.results_count(), results_pushed ))); } } OpCode::FrameSync => { return Ok(()); } } // Apply the instruction cost to the cycle counter self.cycles += opcode.cycles(); Ok(()) } pub fn trap(&self, code: u32, opcode: u16, message: String, pc: u32) -> LogicalFrameEndingReason { LogicalFrameEndingReason::Trap(self.program.create_trap(code, opcode, message, pc)) } pub fn push(&mut self, val: Value) { 
self.operand_stack.push(val);
    }

    /// Pops the top of the operand stack.
    ///
    /// # Errors
    /// Returns `"Stack underflow"` when the stack is empty.
    // FIX: restored stripped return type (`-> Result` in the original).
    pub fn pop(&mut self) -> Result<Value, String> {
        self.operand_stack.pop().ok_or("Stack underflow".into())
    }

    /// Pops a value and coerces it to `f64`.
    // FIX: restored stripped return type; the body yields Value::as_float().
    pub fn pop_number(&mut self) -> Result<f64, String> {
        let val = self.pop()?;
        val.as_float().ok_or_else(|| "Expected number".into())
    }

    /// Pops a value and coerces it to `i64`; booleans coerce to 1/0.
    // FIX: restored stripped return type; the body yields Value::as_integer().
    pub fn pop_integer(&mut self) -> Result<i64, String> {
        let val = self.pop()?;
        if let Value::Boolean(b) = val {
            return Ok(if b { 1 } else { 0 });
        }
        val.as_integer().ok_or_else(|| "Expected integer".into())
    }

    /// Borrows the top of the operand stack without popping.
    // FIX: restored stripped return type (`-> Result` in the original).
    pub fn peek(&self) -> Result<&Value, String> {
        self.operand_stack.last().ok_or("Stack underflow".into())
    }

    /// Pops two operands (b then a), applies `f(a, b)`, and pushes the result.
    /// `OpError::Trap` is enriched with opcode/PC context; `OpError::Panic`
    /// becomes a fatal stop reason.
    // FIX: restored stripped generics — the original read `fn binary_op(... f: F)`
    // with no `<F>` and `FnOnce(Value, Value) -> Result` with no type arguments.
    fn binary_op<F>(&mut self, opcode: OpCode, start_pc: u32, f: F) -> Result<(), LogicalFrameEndingReason>
    where
        F: FnOnce(Value, Value) -> Result<Value, OpError>,
    {
        let b = self.pop().map_err(LogicalFrameEndingReason::Panic)?;
        let a = self.pop().map_err(LogicalFrameEndingReason::Panic)?;
        match f(a, b) {
            Ok(res) => {
                self.push(res);
                Ok(())
            }
            Err(OpError::Trap(code, msg)) => Err(self.trap(code, opcode as u16, msg, start_pc)),
            Err(OpError::Panic(msg)) => Err(LogicalFrameEndingReason::Panic(msg)),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a VM whose single function 0 spans the whole ROM, so jumps and
    /// local addressing resolve against offset 0.
    // FIX: restored stripped parameter types (`Vec` with no arguments).
    fn new_test_vm(rom: Vec<u8>, constant_pool: Vec<Value>) -> VirtualMachine {
        let rom_len = rom.len() as u32;
        let mut vm = VirtualMachine::new(rom, constant_pool);
        vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta {
            code_offset: 0,
            code_len: rom_len,
            ..Default::default()
        }]);
        vm
    }

    use crate::{HostReturn, Value, VmFault};
    use prometeu_bytecode::abi::SourceSpan;
    use prometeu_bytecode::FunctionMeta;
    use prometeu_hardware_contract::expect_int;

    /// Host stub whose syscalls always succeed and push nothing.
    struct MockNative;
    impl NativeInterface for MockNative {
        fn syscall(&mut self, _id: u32, _args: &[Value], _ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> {
            Ok(())
        }
    }

    #[test]
    fn test_arithmetic_chain() {
        let mut native = MockNative;
        let mut ctx = HostContext::new(None);
        // (10 + 20) = 30; 30 * 2 = 60; 60 / 5 = 12; 12 % 4 = 0
let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&10i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&20i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&2i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Mul as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&5i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&4i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Mod as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int32(0)); } #[test] fn test_div_by_zero_trap() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&10i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&0i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, TRAP_DIV_ZERO); assert_eq!(trap.opcode, OpCode::Div as u16); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_int_to_bound_checked_trap() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as 
u16).to_le_bytes()); rom.extend_from_slice(&70000i32.to_le_bytes()); // > 65535 rom.extend_from_slice(&(OpCode::IntToBoundChecked as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, TRAP_OOB); assert_eq!(trap.opcode, OpCode::IntToBoundChecked as u16); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_bounded_add_overflow_trap() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushBounded as u16).to_le_bytes()); rom.extend_from_slice(&60000u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushBounded as u16).to_le_bytes()); rom.extend_from_slice(&10000u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, TRAP_OOB); assert_eq!(trap.opcode, OpCode::Add as u16); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_comparisons_polymorphic() { let mut native = MockNative; let mut ctx = HostContext::new(None); // 10 < 20.5 (true) let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&10i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes()); rom.extend_from_slice(&20.5f64.to_le_bytes()); rom.extend_from_slice(&(OpCode::Lt as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Boolean(true)); 
} #[test] fn test_push_i64_immediate() { let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&42i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.peek().unwrap(), &Value::Int64(42)); } #[test] fn test_push_f64_immediate() { let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushF64 as u16).to_le_bytes()); rom.extend_from_slice(&3.14f64.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.peek().unwrap(), &Value::Float(3.14)); } #[test] fn test_push_bool_immediate() { let mut rom = Vec::new(); // True rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes()); rom.push(1); // False rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes()); rom.push(0); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); // Push true assert_eq!(vm.peek().unwrap(), &Value::Boolean(true)); vm.step(&mut native, &mut ctx).unwrap(); // Push false assert_eq!(vm.peek().unwrap(), &Value::Boolean(false)); } #[test] fn test_push_const_string() { let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushConst as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let cp = vec![Value::String("hello".into())]; let mut vm = VirtualMachine::new(rom, cp); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.peek().unwrap(), 
&Value::String("hello".into())); } #[test] fn test_call_ret_scope_separation() { let mut rom = Vec::new(); // entrypoint: // PUSH_I64 10 // CALL func_id 1 // HALT rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&10i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes()); rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1 rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let func_addr = rom.len(); // func: // PUSH_SCOPE // PUSH_I64 20 // GET_LOCAL 0 -- should be 10 (arg) // ADD -- 10 + 20 = 30 // SET_LOCAL 0 -- store result in local 0 (the arg slot) // POP_SCOPE // GET_LOCAL 0 -- read 30 back // RET rom.extend_from_slice(&(OpCode::PushScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&20i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PopScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes()); let functions = vec![ FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() }, FunctionMeta { code_offset: func_addr as u32, code_len: (rom.len() - func_addr) as u32, param_slots: 1, return_slots: 1, ..Default::default() }, ]; let mut vm = VirtualMachine { program: ProgramImage::new(rom, vec![], functions, None, std::collections::HashMap::new()), ..Default::default() }; vm.prepare_call("0"); let mut native = MockNative; let mut ctx = HostContext::new(None); // Run until Halt let mut steps = 0; while !vm.halted && steps < 100 { vm.step(&mut native, &mut ctx).unwrap(); steps += 1; } 
assert!(vm.halted); assert_eq!(vm.pop_integer().unwrap(), 30); assert_eq!(vm.operand_stack.len(), 0); assert_eq!(vm.call_stack.len(), 1); assert_eq!(vm.scope_stack.len(), 0); } #[test] fn test_ret_mandatory_value() { let mut rom = Vec::new(); // entrypoint: CALL func_id 1; HALT rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes()); rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1 rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let func_addr = rom.len(); // func: RET (SEM VALOR ANTES) rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes()); let functions = vec![ FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() }, FunctionMeta { code_offset: func_addr as u32, code_len: (rom.len() - func_addr) as u32, param_slots: 0, return_slots: 1, ..Default::default() }, ]; let mut vm = VirtualMachine { program: ProgramImage::new(rom, vec![], functions, None, std::collections::HashMap::new()), ..Default::default() }; vm.prepare_call("0"); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); // CALL let res = vm.step(&mut native, &mut ctx); // RET -> should fail assert!(res.is_err()); match res.unwrap_err() { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, TRAP_BAD_RET_SLOTS); } _ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS)"), } // Agora com valor de retorno let mut rom2 = Vec::new(); rom2.extend_from_slice(&(OpCode::Call as u16).to_le_bytes()); rom2.extend_from_slice(&1u32.to_le_bytes()); rom2.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let func_addr2 = rom2.len(); rom2.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom2.extend_from_slice(&123i64.to_le_bytes()); rom2.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes()); let functions2 = vec![ FunctionMeta { code_offset: 0, code_len: func_addr2 as u32, ..Default::default() }, FunctionMeta { code_offset: func_addr2 as u32, code_len: (rom2.len() - func_addr2) as u32, 
param_slots: 0, return_slots: 1, ..Default::default() }, ]; let mut vm2 = VirtualMachine { program: ProgramImage::new(rom2, vec![], functions2, None, std::collections::HashMap::new()), ..Default::default() }; vm2.prepare_call("0"); vm2.step(&mut native, &mut ctx).unwrap(); // CALL vm2.step(&mut native, &mut ctx).unwrap(); // PUSH_I64 vm2.step(&mut native, &mut ctx).unwrap(); // RET assert_eq!(vm2.operand_stack.len(), 1); assert_eq!(vm2.pop().unwrap(), Value::Int64(123)); } #[test] fn test_nested_scopes() { let mut rom = Vec::new(); // PUSH_I64 1 // PUSH_SCOPE // PUSH_I64 2 // PUSH_SCOPE // PUSH_I64 3 // POP_SCOPE // POP_SCOPE // HALT rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&1i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&2i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&3i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::PopScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PopScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); // Execute step by step and check stack vm.step(&mut native, &mut ctx).unwrap(); // Push 1 assert_eq!(vm.operand_stack.len(), 1); vm.step(&mut native, &mut ctx).unwrap(); // PushScope 1 assert_eq!(vm.scope_stack.len(), 1); assert_eq!(vm.scope_stack.last().unwrap().scope_stack_base, 1); vm.step(&mut native, &mut ctx).unwrap(); // Push 2 assert_eq!(vm.operand_stack.len(), 2); vm.step(&mut native, &mut ctx).unwrap(); // PushScope 2 assert_eq!(vm.scope_stack.len(), 2); assert_eq!(vm.scope_stack.last().unwrap().scope_stack_base, 2); vm.step(&mut native, &mut ctx).unwrap(); // Push 3 
assert_eq!(vm.operand_stack.len(), 3); vm.step(&mut native, &mut ctx).unwrap(); // PopScope 2 assert_eq!(vm.scope_stack.len(), 1); assert_eq!(vm.operand_stack.len(), 2); assert_eq!(vm.operand_stack.last().unwrap(), &Value::Int64(2)); vm.step(&mut native, &mut ctx).unwrap(); // PopScope 1 assert_eq!(vm.scope_stack.len(), 0); assert_eq!(vm.operand_stack.len(), 1); assert_eq!(vm.operand_stack.last().unwrap(), &Value::Int64(1)); } #[test] fn test_pop_scope_does_not_affect_ret() { let mut rom = Vec::new(); // PUSH_I64 100 // CALL func_id 1 // HALT rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&100i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes()); rom.extend_from_slice(&1u32.to_le_bytes()); // func_id 1 rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let func_addr = rom.len(); // func: // PUSH_I64 200 // PUSH_SCOPE // PUSH_I64 300 // RET rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&200i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&300i64.to_le_bytes()); rom.extend_from_slice(&(OpCode::PopScope as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes()); let functions = vec![ FunctionMeta { code_offset: 0, code_len: func_addr as u32, ..Default::default() }, FunctionMeta { code_offset: func_addr as u32, code_len: (rom.len() - func_addr) as u32, param_slots: 0, return_slots: 1, ..Default::default() }, ]; let mut vm = VirtualMachine { program: ProgramImage::new(rom, vec![], functions, None, std::collections::HashMap::new()), ..Default::default() }; vm.prepare_call("0"); let mut native = MockNative; let mut ctx = HostContext::new(None); let mut steps = 0; while !vm.halted && steps < 100 { vm.step(&mut native, &mut ctx).unwrap(); steps += 1; } assert!(vm.halted); assert_eq!(vm.operand_stack.len(), 2); 
assert_eq!(vm.operand_stack[0], Value::Int64(100)); assert_eq!(vm.operand_stack[1], Value::Int64(200)); } #[test] fn test_push_i32() { let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&42i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.peek().unwrap(), &Value::Int32(42)); } #[test] fn test_bitwise_promotion() { let mut native = MockNative; let mut ctx = HostContext::new(None); // i32 & i32 -> i32 let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&0xF0i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&0x0Fi32.to_le_bytes()); rom.extend_from_slice(&(OpCode::BitAnd as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int32(0)); // i32 | i64 -> i64 let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&0xF0i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI64 as u16).to_le_bytes()); rom.extend_from_slice(&0x0Fi64.to_le_bytes()); rom.extend_from_slice(&(OpCode::BitOr as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int64(0xFF)); } #[test] fn test_comparisons_lte_gte() { let mut native = MockNative; let mut ctx = HostContext::new(None); // 10 <= 20 (true) let 
mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&10i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&20i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Lte as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Boolean(true)); // 20 >= 20 (true) let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&20i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&20i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Gte as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Boolean(true)); } #[test] fn test_negation() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&42i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Neg as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); vm.step(&mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int32(-42)); } #[test] fn test_jmp_if_true() { let mut native = MockNative; let mut ctx = HostContext::new(None); // Corrected Calculations: // 0-1: PushBool // 2: 1 (u8) // 3-4: JmpIfTrue // 5-8: addr (u32) // 9-10: Halt (Offset 9) // 11-12: PushI32 (Offset 11) // 13-16: 100 (i32) // 
17-18: Halt let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes()); rom.push(1); rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes()); rom.extend_from_slice(&(11u32).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); // Offset 9 rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); // Offset 11 rom.extend_from_slice(&100i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.step(&mut native, &mut ctx).unwrap(); // PushBool vm.step(&mut native, &mut ctx).unwrap(); // JmpIfTrue assert_eq!(vm.pc, 11); vm.step(&mut native, &mut ctx).unwrap(); // PushI32 assert_eq!(vm.pop().unwrap(), Value::Int32(100)); } #[test] fn test_trap_opcode() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&42i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Trap as u16).to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(report.reason, LogicalFrameEndingReason::Breakpoint); assert_eq!(vm.pc, 8); // PushI32 (6 bytes) + Trap (2 bytes) assert_eq!(vm.peek().unwrap(), &Value::Int32(42)); } #[test] fn test_pop_n_opcode() { let mut native = MockNative; let mut ctx = HostContext::new(None); let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&1i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&2i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&3i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::PopN as u16).to_le_bytes()); rom.extend_from_slice(&2u32.to_le_bytes()); 
rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int32(1)); assert!(vm.pop().is_err()); // Stack should be empty } #[test] fn test_hip_traps_oob() { let mut native = MockNative; let mut ctx = HostContext::new(None); // ALLOC int, 1 -> Gate(0) // GATE_LOAD 1 -> TRAP_OOB (size is 1, offset 1 is invalid) let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::Alloc as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); // type_id rom.extend_from_slice(&1u32.to_le_bytes()); // slots rom.extend_from_slice(&(OpCode::GateLoad as u16).to_le_bytes()); rom.extend_from_slice(&1u32.to_le_bytes()); // offset 1 rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_OOB); assert_eq!(trap.opcode, OpCode::GateLoad as u16); assert!(trap.message.contains("Out-of-bounds")); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_hip_traps_type() { let mut native = MockNative; let mut ctx = HostContext::new(None); // PUSH_I32 42 // GATE_LOAD 0 -> TRAP_TYPE (Expected gate handle, got Int32) let mut rom = Vec::new(); rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes()); rom.extend_from_slice(&42i32.to_le_bytes()); rom.extend_from_slice(&(OpCode::GateLoad as u16).to_le_bytes()); rom.extend_from_slice(&0u32.to_le_bytes()); rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes()); let mut vm = new_test_vm(rom.clone(), vec![]); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_TYPE); assert_eq!(trap.opcode, OpCode::GateLoad as u16); } _ => 
panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_entry_point_ret_with_prepare_call() { // PushI32 0 (0x17), then Ret (0x51) let rom = vec![ 0x17, 0x00, // PushI32 0x00, 0x00, 0x00, 0x00, // value 0 0x11, 0x00, // Pop 0x51, 0x00 // Ret ]; let mut vm = VirtualMachine::new(rom.clone(), vec![]); vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta { code_offset: 0, code_len: rom.len() as u32, ..Default::default() }]); let mut ctx = HostContext::new(None); struct TestNative; impl NativeInterface for TestNative { fn syscall(&mut self, _id: u32, _args: &[Value], _ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> { Ok(()) } } let mut native = TestNative; vm.prepare_call("0"); let result = vm.run_budget(100, &mut native, &mut ctx).expect("VM run failed"); assert_eq!(result.reason, LogicalFrameEndingReason::EndOfRom); } #[test] fn test_syscall_abi_multi_slot_return() { let rom = vec![ 0x70, 0x00, // Syscall + Reserved 0x01, 0x00, 0x00, 0x00, // Syscall ID 1 ]; struct MultiReturnNative; impl NativeInterface for MultiReturnNative { fn syscall(&mut self, _id: u32, _args: &[Value], ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> { ret.push_bool(true); ret.push_int(42); ret.push_bounded(255)?; Ok(()) } } let mut vm = VirtualMachine::new(rom.clone(), vec![]); vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta { code_offset: 0, code_len: rom.len() as u32, ..Default::default() }]); let mut native = MultiReturnNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Bounded(255)); assert_eq!(vm.pop().unwrap(), Value::Int64(42)); assert_eq!(vm.pop().unwrap(), Value::Boolean(true)); } #[test] fn test_syscall_abi_void_return() { let rom = vec![ 0x70, 0x00, // Syscall + Reserved 0x01, 0x00, 0x00, 0x00, // Syscall ID 1 ]; struct VoidReturnNative; impl NativeInterface for 
VoidReturnNative { fn syscall(&mut self, _id: u32, _args: &[Value], _ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> { Ok(()) } } let mut vm = VirtualMachine::new(rom.clone(), vec![]); vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta { code_offset: 0, code_len: rom.len() as u32, ..Default::default() }]); let mut native = VoidReturnNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); vm.operand_stack.push(Value::Int32(100)); vm.run_budget(100, &mut native, &mut ctx).unwrap(); assert_eq!(vm.pop().unwrap(), Value::Int32(100)); assert!(vm.operand_stack.is_empty()); } #[test] fn test_syscall_arg_type_mismatch_trap() { // GfxClear (0x1001) takes 1 argument let rom = vec![ 0x16, 0x00, // PushBool + Reserved 0x01, // value 1 (true) 0x70, 0x00, // Syscall + Reserved 0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001 ]; struct ArgCheckNative; impl NativeInterface for ArgCheckNative { fn syscall(&mut self, _id: u32, args: &[Value], _ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> { expect_int(args, 0)?; Ok(()) } } let mut vm = VirtualMachine::new(rom.clone(), vec![]); vm.program.functions = std::sync::Arc::from(vec![prometeu_bytecode::FunctionMeta { code_offset: 0, code_len: rom.len() as u32, ..Default::default() }]); let mut native = ArgCheckNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_TYPE); assert_eq!(trap.opcode, OpCode::Syscall as u16); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_invalid_syscall_trap() { let rom = vec![ 0x70, 0x00, // Syscall + Reserved 0xEF, 0xBE, 0xAD, 0xDE, // 0xDEADBEEF ]; let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); let report = vm.run_budget(100, 
&mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_INVALID_SYSCALL); assert_eq!(trap.opcode, OpCode::Syscall as u16); assert!(trap.message.contains("Unknown syscall")); assert_eq!(trap.pc, 0); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_syscall_arg_underflow_trap() { // GfxClear (0x1001) expects 1 arg let rom = vec![ 0x70, 0x00, // Syscall + Reserved 0x01, 0x10, 0x00, 0x00, // Syscall ID 0x1001 ]; let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = MockNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Trap(trap) => { assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_STACK_UNDERFLOW); assert_eq!(trap.opcode, OpCode::Syscall as u16); assert!(trap.message.contains("underflow")); assert_eq!(trap.pc, 0); } _ => panic!("Expected Trap, got {:?}", report.reason), } } #[test] fn test_syscall_results_count_mismatch_panic() { // GfxClear565 (0x1010) expects 0 results let rom = vec![ 0x17, 0x00, // PushI32 0x00, 0x00, 0x00, 0x00, // value 0 0x70, 0x00, // Syscall + Reserved 0x10, 0x10, 0x00, 0x00, // Syscall ID 0x1010 ]; struct BadNative; impl NativeInterface for BadNative { fn syscall(&mut self, _id: u32, _args: &[Value], ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> { // Wrong: GfxClear565 is void but we push something ret.push_int(42); Ok(()) } } let mut vm = new_test_vm(rom.clone(), vec![]); let mut native = BadNative; let mut ctx = HostContext::new(None); vm.prepare_call("0"); let report = vm.run_budget(100, &mut native, &mut ctx).unwrap(); match report.reason { LogicalFrameEndingReason::Panic(msg) => assert!(msg.contains("results mismatch")), _ => panic!("Expected Panic, got {:?}", report.reason), } } #[test] fn test_host_return_bounded_overflow_trap() { let mut stack = 
Vec::new(); let mut ret = HostReturn::new(&mut stack); let res = ret.push_bounded(65536); assert!(res.is_err()); match res.err().unwrap() { crate::VmFault::Trap(code, _) => { assert_eq!(code, prometeu_bytecode::abi::TRAP_OOB); } _ => panic!("Expected Trap"), } } #[test] fn test_loader_hardening_invalid_magic() { let mut vm = VirtualMachine::default(); let res = vm.initialize(vec![0, 0, 0, 0], ""); assert_eq!(res, Err(VmInitError::InvalidFormat)); // VM should remain empty assert_eq!(vm.program.rom.len(), 0); } #[test] fn test_loader_hardening_unsupported_version() { let mut vm = VirtualMachine::default(); let mut header = vec![0u8; 32]; header[0..4].copy_from_slice(b"PBS\0"); header[4..6].copy_from_slice(&1u16.to_le_bytes()); // version 1 (unsupported) let res = vm.initialize(header, ""); assert_eq!(res, Err(VmInitError::UnsupportedFormat)); } #[test] fn test_loader_hardening_malformed_pbs_v0() { let mut vm = VirtualMachine::default(); let mut header = vec![0u8; 32]; header[0..4].copy_from_slice(b"PBS\0"); header[8..12].copy_from_slice(&1u32.to_le_bytes()); // 1 section claimed but none provided let res = vm.initialize(header, ""); match res { Err(VmInitError::PbsV0LoadFailed(prometeu_bytecode::LoadError::UnexpectedEof)) => {}, _ => panic!("Expected PbsV0LoadFailed(UnexpectedEof), got {:?}", res), } } #[test] fn test_loader_hardening_entrypoint_not_found() { let mut vm = VirtualMachine::default(); // Valid empty PBS v0 module let mut header = vec![0u8; 32]; header[0..4].copy_from_slice(b"PBS\0"); // Try to initialize with numeric entrypoint 10 (out of bounds for empty ROM) let res = vm.initialize(header, "10"); assert_eq!(res, Err(VmInitError::EntrypointNotFound)); // VM state should not be updated assert_eq!(vm.pc, 0); assert_eq!(vm.program.rom.len(), 0); } #[test] fn test_loader_hardening_successful_init() { let mut vm = VirtualMachine::default(); vm.pc = 123; // Pollution let mut header = vec![0u8; 32]; header[0..4].copy_from_slice(b"PBS\0"); let res = 
vm.initialize(header, "");
assert!(res.is_ok());
assert_eq!(vm.pc, 0);
assert_eq!(vm.program.rom.len(), 0);
assert_eq!(vm.cycles, 0);
}

/// Verifies the basic calling convention: arguments pushed by the caller are
/// visible as locals in the callee, and a single-slot return lands on the
/// caller's operand stack.
#[test]
fn test_calling_convention_add() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Program layout:
    //   F0 (entry): PUSH_I32 10; PUSH_I32 20; CALL 1; HALT
    //   F1 (add):   GET_LOCAL 0; GET_LOCAL 1; ADD; RET (1 slot)
    let mut rom = Vec::new();

    // --- F0 ---
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&20i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // --- F1 ---
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 2,
            return_slots: 1,
            ..Default::default()
        },
    ]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // 10 + 20 computed inside F1 and returned to the caller's stack.
    assert_eq!(vm.operand_stack.last().unwrap(), &Value::Int32(30));
}

/// A function that returns two slots must leave both values on the caller's
/// stack, in push order.
#[test]
fn test_calling_convention_multi_slot_return() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Program layout:
    //   F0: CALL 1; HALT
    //   F1: PUSH_I32 100; PUSH_I32 200; RET (2 slots)
    let mut rom = Vec::new();

    // --- F0 ---
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // --- F1 ---
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&100i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&200i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 0,
            return_slots: 2,
            ..Default::default()
        },
    ]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // Stack should be exactly [100, 200].
    assert_eq!(vm.operand_stack.len(), 2);
    assert_eq!(vm.operand_stack[0], Value::Int32(100));
    assert_eq!(vm.operand_stack[1], Value::Int32(200));
}

/// A void call must consume its argument slot and leave nothing behind:
/// the caller pushes one argument, F1 (1 param slot, 0 return slots) absorbs
/// it, and the stack is empty after HALT.
#[test]
fn test_calling_convention_void_call() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Program layout:
    //   F0: PUSH_I32 42; CALL 1; HALT
    //   F1: RET (1 param slot consumed, 0 return slots)
    let mut rom = Vec::new();

    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 1,
            return_slots: 0,
            ..Default::default()
        },
    ]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.len(), 0);
}

/// Calling a function index with no registered metadata must trap with
/// TRAP_INVALID_FUNC rather than panicking or reading garbage.
#[test]
fn test_trap_invalid_func() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // CALL 99 — index 99 does not exist.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&99u32.to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_INVALID_FUNC);
            assert_eq!(trap.opcode, OpCode::Call as u16);
        }
        _ => panic!("Expected Trap(TRAP_INVALID_FUNC), got {:?}", report.reason),
    }
}

/// RET must verify the declared return-slot count: a function that pushes a
/// value while declaring 0 return slots traps with TRAP_BAD_RET_SLOTS.
#[test]
fn test_trap_bad_ret_slots() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Program layout:
    //   F0: CALL 1; HALT
    //   F1: PUSH_I32 42; RET  (but metadata declares 0 return slots)
    let mut rom = Vec::new();

    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            param_slots: 0,
            // ERROR on purpose: the function pushes 42 but declares 0 returns.
            return_slots: 0,
            ..Default::default()
        },
    ]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_BAD_RET_SLOTS);
            assert_eq!(trap.opcode, OpCode::Ret as u16);
            assert!(trap.message.contains("Incorrect stack height"));
        }
        _ => panic!("Expected Trap(TRAP_BAD_RET_SLOTS), got {:?}", report.reason),
    }
}

/// SET_LOCAL followed by GET_LOCAL must round-trip a value through the frame's
/// local slots, independent of intervening stack traffic.
#[test]
fn test_locals_round_trip() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_I32 42; SET_LOCAL 0; PUSH_I32 0 (garbage); POP; GET_LOCAL 0; RET (1 slot)
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // NOTE(review): code_len is 20 but the emitted ROM is 28 bytes; the test
    // relies on reaching EndOfRom, so the mismatch appears intentional — confirm.
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 20,
        local_slots: 1,
        return_slots: 1,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::EndOfRom);
    // RET pops the return values and re-pushes them on the caller's stack
    // (the sentinel frame's stack in this setup).
    assert_eq!(vm.operand_stack, vec![Value::Int32(42)]);
}

/// Locals must be zero-initialized (Null) on every call: a value stored in a
/// local during one invocation must not leak into the next.
#[test]
fn test_locals_per_call_isolation() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Program layout:
    //   F0 (entry): CALL 1; POP; CALL 1; HALT
    //   F1: GET_LOCAL 0 (Null on a fresh call); PUSH_I32 42; SET_LOCAL 0;
    //       RET (1 slot: the initial Null)
    let mut rom = Vec::new();

    // --- F0 ---
    let f0_start = 0;
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Pop as u16).to_le_bytes());
    rom.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());
    let f0_len = rom.len() - f0_start;

    // --- F1 ---
    let f1_start = rom.len() as u32;
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&42i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::SetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&0u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let f1_len = rom.len() as u32 - f1_start;

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![
        FunctionMeta {
            code_offset: f0_start as u32,
            code_len: f0_len as u32,
            ..Default::default()
        },
        FunctionMeta {
            code_offset: f1_start,
            code_len: f1_len,
            local_slots: 1,
            return_slots: 1,
            ..Default::default()
        },
    ]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    // The last value on the stack is the return of the second CALL 1, which
    // must be Null because locals are re-initialized on each call.
    assert_eq!(vm.operand_stack.last().unwrap(), &Value::Null);
}

/// Accessing a local slot beyond the declared count must trap with
/// TRAP_INVALID_LOCAL.
#[test]
fn test_invalid_local_index_traps() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Function with 0 params and 1 local; GET_LOCAL 1 is out of bounds.
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::GetLocal as u16).to_le_bytes());
    rom.extend_from_slice(&1u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 8,
        local_slots: 1,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, prometeu_bytecode::abi::TRAP_INVALID_LOCAL);
            assert_eq!(trap.opcode, OpCode::GetLocal as u16);
            assert!(trap.message.contains("out of bounds"));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}

/// Nested conditionals: with the outer condition true and the inner false,
/// control must land in the inner ELSE branch and push 2.
#[test]
fn test_nested_if() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // Source shape:
    //   if (true) { if (false) { PUSH 1 } else { PUSH 2 } } else { PUSH 3 }
    //   HALT
    let mut rom = Vec::new();

    // 0: PUSH_BOOL true
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    // 3: JMP_IF_FALSE -> ELSE1 (offset 42)
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&42u32.to_le_bytes());
    // Inner if:
    // 9: PUSH_BOOL false
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(0);
    // 12: JMP_IF_FALSE -> ELSE2 (offset 30)
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&30u32.to_le_bytes());
    // 18: PUSH_I32 1
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    // 24: JMP -> END (offset 48)
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&48u32.to_le_bytes());
    // ELSE2:
    // 30: PUSH_I32 2
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&2i32.to_le_bytes());
    // 36: JMP -> END (offset 48)
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&48u32.to_le_bytes());
    // ELSE1:
    // 42: PUSH_I32 3
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&3i32.to_le_bytes());
    // END:
    // 48: HALT
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    // Function metadata is required for absolute jumps to resolve correctly.
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 50,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(vm.pop().unwrap(), Value::Int32(2));
}

/// An if/else with two empty bodies must execute cleanly and leave the stack
/// untouched (the condition is consumed by JMP_IF_FALSE).
#[test]
fn test_if_with_empty_branches() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_BOOL true; JMP_IF_FALSE -> 15; (empty then) JMP -> 15;
    // ELSE: (empty) END: HALT
    let mut rom = Vec::new();

    // 0-2: PUSH_BOOL true
    rom.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    rom.push(1);
    // 3-8: JMP_IF_FALSE -> 15
    rom.extend_from_slice(&(OpCode::JmpIfFalse as u16).to_le_bytes());
    rom.extend_from_slice(&15u32.to_le_bytes());
    // 9-14: JMP -> 15
    rom.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    rom.extend_from_slice(&15u32.to_le_bytes());
    // 15-16: HALT
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 17,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    assert_eq!(report.reason, LogicalFrameEndingReason::Halted);
    assert_eq!(vm.operand_stack.len(), 0);
}

/// Conditional jumps are strictly typed: feeding an Int32 to JMP_IF_TRUE must
/// trap with TRAP_TYPE instead of silently coercing to a boolean.
#[test]
fn test_jmp_if_non_boolean_trap() {
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);

    // PUSH_I32 1; JMP_IF_TRUE 9; HALT
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&1i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
    rom.extend_from_slice(&9u32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let mut vm = new_test_vm(rom.clone(), vec![]);
    vm.program.functions = std::sync::Arc::from(vec![FunctionMeta {
        code_offset: 0,
        code_len: 14,
        ..Default::default()
    }]);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_TYPE);
            assert_eq!(trap.opcode, OpCode::JmpIfTrue as u16);
            assert!(trap.message.contains("Expected boolean"));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}

/// When DebugInfo is attached, a trap must carry the source span mapped to the
/// faulting PC.
#[test]
fn test_traceable_trap_with_span() {
    // 0: PUSH_I32 10 (6 bytes)
    // 6: PUSH_I32 0  (6 bytes)
    // 12: DIV        (2 bytes) -> division by zero
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes());

    let pc_to_span = vec![
        (0, SourceSpan { file_id: 1, start: 10, end: 15 }),
        (6, SourceSpan { file_id: 1, start: 16, end: 20 }),
        (12, SourceSpan { file_id: 1, start: 21, end: 25 }),
    ];
    let debug_info = prometeu_bytecode::DebugInfo {
        pc_to_span,
        function_names: vec![(0, "main".to_string())],
    };

    let program = ProgramImage::new(
        rom.clone(),
        vec![],
        vec![FunctionMeta {
            code_offset: 0,
            code_len: rom.len() as u32,
            ..Default::default()
        }],
        Some(debug_info),
        std::collections::HashMap::new(),
    );
    let mut vm = VirtualMachine { program, ..Default::default() };
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_DIV_ZERO);
            assert_eq!(trap.pc, 12);
            assert_eq!(trap.span, Some(SourceSpan { file_id: 1, start: 21, end: 25 }));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}

/// When DebugInfo carries function names, the trap message must include the
/// name of the function containing the faulting PC.
#[test]
fn test_traceable_trap_with_function_name() {
    // 0: PUSH_I32 10 (6 bytes)
    // 6: PUSH_I32 0  (6 bytes)
    // 12: DIV        (2 bytes) -> division by zero
    let mut rom = Vec::new();
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&10i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    rom.extend_from_slice(&0i32.to_le_bytes());
    rom.extend_from_slice(&(OpCode::Div as u16).to_le_bytes());

    let pc_to_span = vec![(12, SourceSpan { file_id: 1, start: 21, end: 25 })];
    let function_names = vec![(0, "math_utils::divide".to_string())];
    let debug_info = prometeu_bytecode::DebugInfo { pc_to_span, function_names };

    let functions = vec![FunctionMeta {
        code_offset: 0,
        code_len: rom.len() as u32,
        ..Default::default()
    }];
    let program = ProgramImage::new(
        rom,
        vec![],
        functions,
        Some(debug_info),
        std::collections::HashMap::new(),
    );
    let mut vm = VirtualMachine { program, ..Default::default() };
    let mut native = MockNative;
    let mut ctx = HostContext::new(None);
    vm.prepare_call("0");

    let report = vm.run_budget(100, &mut native, &mut ctx).unwrap();
    match report.reason {
        LogicalFrameEndingReason::Trap(trap) => {
            assert_eq!(trap.code, TRAP_DIV_ZERO);
            assert!(trap.message.contains("math_utils::divide"));
        }
        _ => panic!("Expected Trap, got {:?}", report.reason),
    }
}
}