hard reseet
This commit is contained in:
parent
bddd588464
commit
463f72a123
86
Cargo.lock
generated
86
Cargo.lock
generated
@ -1156,14 +1156,6 @@ version = "3.1.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e2db585e1d738fc771bf08a151420d3ed193d9d895a36df7f6f8a9456b911ddc"
|
checksum = "e2db585e1d738fc771bf08a151420d3ed193d9d895a36df7f6f8a9456b911ddc"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "language-api"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"serde",
|
|
||||||
"thiserror",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.180"
|
version = "0.2.180"
|
||||||
@ -1881,20 +1873,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773"
|
checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometeu-abi"
|
name = "prometeu-build-pipeline"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"clap",
|
||||||
|
"pathdiff",
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
|
"prometeu-language-api",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
]
|
"tempfile",
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "prometeu-analysis"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@ -1910,31 +1900,32 @@ version = "0.1.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"clap",
|
"clap",
|
||||||
"prometeu-compiler",
|
"prometeu-build-pipeline",
|
||||||
"prometeu-host-desktop-winit",
|
"prometeu-host-desktop-winit",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometeu-compiler"
|
name = "prometeu-core"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
|
||||||
"clap",
|
|
||||||
"language-api",
|
|
||||||
"pathdiff",
|
|
||||||
"prometeu-abi",
|
|
||||||
"prometeu-analysis",
|
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"tempfile",
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prometeu-deps"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"prometeu-core",
|
||||||
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometeu-drivers"
|
name = "prometeu-drivers"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-abi",
|
"prometeu-core",
|
||||||
"prometeu-hal",
|
"prometeu-hal",
|
||||||
"prometeu-vm",
|
"prometeu-vm",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
@ -1944,8 +1935,8 @@ dependencies = [
|
|||||||
name = "prometeu-firmware"
|
name = "prometeu-firmware"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-abi",
|
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
"prometeu-drivers",
|
"prometeu-drivers",
|
||||||
"prometeu-hal",
|
"prometeu-hal",
|
||||||
"prometeu-system",
|
"prometeu-system",
|
||||||
@ -1958,8 +1949,8 @@ dependencies = [
|
|||||||
name = "prometeu-hal"
|
name = "prometeu-hal"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-abi",
|
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
]
|
]
|
||||||
@ -1971,7 +1962,7 @@ dependencies = [
|
|||||||
"clap",
|
"clap",
|
||||||
"cpal",
|
"cpal",
|
||||||
"pixels",
|
"pixels",
|
||||||
"prometeu-abi",
|
"prometeu-core",
|
||||||
"prometeu-drivers",
|
"prometeu-drivers",
|
||||||
"prometeu-firmware",
|
"prometeu-firmware",
|
||||||
"prometeu-hal",
|
"prometeu-hal",
|
||||||
@ -1981,12 +1972,35 @@ dependencies = [
|
|||||||
"winit",
|
"winit",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prometeu-language-api"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"serde",
|
||||||
|
"thiserror",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prometeu-lowering"
|
||||||
|
version = "0.1.0"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"clap",
|
||||||
|
"pathdiff",
|
||||||
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
|
"prometeu-language-api",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"tempfile",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prometeu-lsp"
|
name = "prometeu-lsp"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-analysis",
|
"prometeu-build-pipeline",
|
||||||
"prometeu-compiler",
|
"prometeu-core",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-util",
|
"tokio-util",
|
||||||
"tower-lsp",
|
"tower-lsp",
|
||||||
@ -1996,8 +2010,8 @@ dependencies = [
|
|||||||
name = "prometeu-system"
|
name = "prometeu-system"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-abi",
|
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
"prometeu-drivers",
|
"prometeu-drivers",
|
||||||
"prometeu-hal",
|
"prometeu-hal",
|
||||||
"prometeu-vm",
|
"prometeu-vm",
|
||||||
@ -2008,8 +2022,8 @@ dependencies = [
|
|||||||
name = "prometeu-vm"
|
name = "prometeu-vm"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-abi",
|
|
||||||
"prometeu-bytecode",
|
"prometeu-bytecode",
|
||||||
|
"prometeu-core",
|
||||||
"prometeu-hal",
|
"prometeu-hal",
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
@ -2413,9 +2427,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tempfile"
|
name = "tempfile"
|
||||||
version = "3.24.0"
|
version = "3.25.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c"
|
checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"fastrand",
|
"fastrand",
|
||||||
"getrandom",
|
"getrandom",
|
||||||
|
|||||||
24
Cargo.toml
24
Cargo.toml
@ -1,18 +1,22 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = [
|
members = [
|
||||||
"crates/compiler/prometeu-abi",
|
"crates/compiler/prometeu-build-pipeline",
|
||||||
"crates/console/prometeu-vm",
|
|
||||||
"crates/console/prometeu-system",
|
|
||||||
"crates/console/prometeu-drivers",
|
|
||||||
"crates/host/prometeu-host-desktop-winit",
|
|
||||||
"crates/tools/prometeu-cli",
|
|
||||||
"crates/compiler/prometeu-bytecode",
|
"crates/compiler/prometeu-bytecode",
|
||||||
"crates/compiler/prometeu-compiler",
|
"crates/compiler/prometeu-core",
|
||||||
|
"crates/compiler/prometeu-deps",
|
||||||
|
"crates/compiler/prometeu-language-api",
|
||||||
|
"crates/compiler/prometeu-lowering",
|
||||||
|
|
||||||
|
"crates/console/prometeu-drivers",
|
||||||
"crates/console/prometeu-firmware",
|
"crates/console/prometeu-firmware",
|
||||||
"crates/compiler/prometeu-analysis",
|
|
||||||
"crates/tools/prometeu-lsp",
|
|
||||||
"crates/console/prometeu-hal",
|
"crates/console/prometeu-hal",
|
||||||
"crates/language-api"
|
"crates/console/prometeu-system",
|
||||||
|
"crates/console/prometeu-vm",
|
||||||
|
|
||||||
|
"crates/host/prometeu-host-desktop-winit",
|
||||||
|
|
||||||
|
"crates/tools/prometeu-cli",
|
||||||
|
"crates/tools/prometeu-lsp",
|
||||||
]
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
|
|||||||
0
crates/compiler/languages/.gitkeep
Normal file
0
crates/compiler/languages/.gitkeep
Normal file
@ -1,9 +0,0 @@
|
|||||||
mod value;
|
|
||||||
mod vm_init_error;
|
|
||||||
mod program;
|
|
||||||
mod vm_fault;
|
|
||||||
|
|
||||||
pub use vm_fault::VmFault;
|
|
||||||
pub use program::ProgramImage;
|
|
||||||
pub use vm_init_error::VmInitError;
|
|
||||||
pub use value::Value;
|
|
||||||
@ -1,8 +0,0 @@
|
|||||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
|
||||||
pub enum VmInitError<E> {
|
|
||||||
InvalidFormat,
|
|
||||||
UnsupportedFormat,
|
|
||||||
PbsV0LoadFailed(prometeu_bytecode::LoadError),
|
|
||||||
EntrypointNotFound,
|
|
||||||
VerificationFailed(E),
|
|
||||||
}
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "prometeu-analysis"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2021"
|
|
||||||
license = "MIT"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
name = "prometeu_analysis"
|
|
||||||
path = "src/lib.rs"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
serde = { version = "1", features = ["derive"] }
|
|
||||||
serde_json = "1"
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
pub mod ids;
|
|
||||||
pub mod span;
|
|
||||||
pub mod file_db;
|
|
||||||
pub mod interner;
|
|
||||||
pub mod text_index;
|
|
||||||
|
|
||||||
pub use ids::*;
|
|
||||||
pub use span::Span;
|
|
||||||
pub use file_db::{FileDB, LineIndex};
|
|
||||||
pub use interner::NameInterner;
|
|
||||||
pub use text_index::TextIndex;
|
|
||||||
@ -1,112 +0,0 @@
|
|||||||
/// TextIndex provides conversions between byte offsets (used in the core)
|
|
||||||
/// and LSP positions (line, column in UTF-16 units).
|
|
||||||
///
|
|
||||||
/// Notes:
|
|
||||||
/// - `line_starts` stores byte offsets for the start of each line.
|
|
||||||
/// - We keep a copy of the text to allow conversions without external dependencies.
|
|
||||||
/// - The LSP column is counted in UTF-16 units, excluding the end-of-line `\n`.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct TextIndex {
|
|
||||||
text: String,
|
|
||||||
line_starts: Vec<u32>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TextIndex {
|
|
||||||
/// Builds the index from the file's current textual content.
|
|
||||||
pub fn new(text: &str) -> Self {
|
|
||||||
let mut line_starts = Vec::with_capacity(128);
|
|
||||||
line_starts.push(0);
|
|
||||||
for (byte, ch) in text.char_indices() {
|
|
||||||
if ch == '\n' {
|
|
||||||
// the start of the next line is the byte after the '\n'
|
|
||||||
line_starts.push((byte + 1) as u32);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Self {
|
|
||||||
text: text.to_string(),
|
|
||||||
line_starts,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Number of lines (0-based; empty lines count).
|
|
||||||
#[inline]
|
|
||||||
pub fn line_count(&self) -> u32 {
|
|
||||||
self.line_starts.len() as u32
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a byte offset (within the file) to (line, UTF-16 column) in LSP format.
|
|
||||||
///
|
|
||||||
/// For offsets exactly at end-of-line, the column will be the line's UTF-16 length.
|
|
||||||
pub fn byte_to_lsp(&self, byte: u32) -> (u32, u32) {
|
|
||||||
let byte = byte.min(self.text.len() as u32);
|
|
||||||
let line = match self.line_starts.binary_search(&byte) {
|
|
||||||
Ok(i) => i as u32,
|
|
||||||
Err(i) => (i.saturating_sub(1)) as u32,
|
|
||||||
};
|
|
||||||
|
|
||||||
let (line_start, line_end) = self.line_bounds(line);
|
|
||||||
let rel = byte.saturating_sub(line_start as u32) as usize;
|
|
||||||
let slice = &self.text[line_start..line_end];
|
|
||||||
|
|
||||||
let mut utf16_col: u32 = 0;
|
|
||||||
for (i, ch) in slice.char_indices() {
|
|
||||||
if i >= rel { break; }
|
|
||||||
utf16_col += ch.len_utf16() as u32;
|
|
||||||
}
|
|
||||||
(line, utf16_col)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts (line, UTF-16 column) to a byte offset.
|
|
||||||
///
|
|
||||||
/// - Lines outside the range are clamped to [0, last].
|
|
||||||
/// - Columns larger than the line's UTF-16 length return end-of-line.
|
|
||||||
pub fn lsp_to_byte(&self, line: u32, utf16_col: u32) -> u32 {
|
|
||||||
let line = line.min(self.line_count().saturating_sub(1));
|
|
||||||
let (line_start, line_end) = self.line_bounds(line);
|
|
||||||
let slice = &self.text[line_start..line_end];
|
|
||||||
|
|
||||||
let mut acc: u32 = 0;
|
|
||||||
for (i, ch) in slice.char_indices() {
|
|
||||||
if acc >= utf16_col {
|
|
||||||
return (line_start + i) as u32;
|
|
||||||
}
|
|
||||||
acc += ch.len_utf16() as u32;
|
|
||||||
}
|
|
||||||
// If the target column is after the last character, return end-of-line.
|
|
||||||
line_end as u32
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
fn line_bounds(&self, line: u32) -> (usize, usize) {
|
|
||||||
let start = *self
|
|
||||||
.line_starts
|
|
||||||
.get(line as usize)
|
|
||||||
.unwrap_or(self.line_starts.last().unwrap());
|
|
||||||
let next = self.line_starts.get(line as usize + 1).copied();
|
|
||||||
// If there is a next line, `next` points to the byte after the current line's '\n',
|
|
||||||
// so the content ends at `next - 1`. Otherwise (last line), the content ends at `text.len()`.
|
|
||||||
let end = match next {
|
|
||||||
Some(next_start) => next_start.saturating_sub(1),
|
|
||||||
None => self.text.len() as u32,
|
|
||||||
};
|
|
||||||
(start as usize, end as usize)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests_internal {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn line_bounds_basic() {
|
|
||||||
let s = "ab\ncd\n";
|
|
||||||
let idx = TextIndex::new(s);
|
|
||||||
assert_eq!(idx.line_count(), 3);
|
|
||||||
// line 0: "ab"
|
|
||||||
assert_eq!(idx.line_bounds(0), (0, 2));
|
|
||||||
// line 1: "cd"
|
|
||||||
assert_eq!(idx.line_bounds(1), (3, 5));
|
|
||||||
// line 2: final empty line
|
|
||||||
assert_eq!(idx.line_bounds(2), (6, 6));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,57 +0,0 @@
|
|||||||
use prometeu_analysis::TextIndex;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn text_index_ascii_roundtrip() {
|
|
||||||
let text = "hello\nworld\nthis is ascii";
|
|
||||||
let idx = TextIndex::new(text);
|
|
||||||
|
|
||||||
// Verify round-trip on all character boundaries
|
|
||||||
let mut boundaries: Vec<usize> = text.char_indices().map(|(i, _)| i).collect();
|
|
||||||
boundaries.push(text.len());
|
|
||||||
|
|
||||||
for &b in &boundaries {
|
|
||||||
let (line, col16) = idx.byte_to_lsp(b as u32);
|
|
||||||
let b2 = idx.lsp_to_byte(line, col16);
|
|
||||||
assert_eq!(b2, b as u32, "roundtrip failed for byte {} -> (l={},c16={})", b, line, col16);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Some direct checks
|
|
||||||
// start: (0,0)
|
|
||||||
assert_eq!(idx.byte_to_lsp(0), (0, 0));
|
|
||||||
// after "hello" (5), before '\n': line 0, col=5
|
|
||||||
assert_eq!(idx.byte_to_lsp(5), (0, 5));
|
|
||||||
// after '\n' (6): line 1, col=0
|
|
||||||
assert_eq!(idx.byte_to_lsp(6), (1, 0));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn text_index_unicode_roundtrip_utf16() {
|
|
||||||
// "a" (1B, 1u16), "é" (2B, 1u16), "🙂" (4B, 2u16), "b" (1B, 1u16)
|
|
||||||
let text = "aé🙂b";
|
|
||||||
let idx = TextIndex::new(text);
|
|
||||||
|
|
||||||
// character boundaries + end
|
|
||||||
let mut boundaries: Vec<usize> = text.char_indices().map(|(i, _)| i).collect();
|
|
||||||
boundaries.push(text.len());
|
|
||||||
|
|
||||||
for &b in &boundaries {
|
|
||||||
let (line, col16) = idx.byte_to_lsp(b as u32);
|
|
||||||
let b2 = idx.lsp_to_byte(line, col16);
|
|
||||||
assert_eq!(b2, b as u32, "unicode roundtrip failed for byte {} -> (l={},c16={})", b, line, col16);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expected columns on line 0
|
|
||||||
// bytes: [0:'a'][1..2:'é'][3..6:'🙂'][7:'b'][8:end]
|
|
||||||
assert_eq!(idx.byte_to_lsp(0), (0, 0)); // before 'a'
|
|
||||||
assert_eq!(idx.byte_to_lsp(1), (0, 1)); // after 'a'
|
|
||||||
assert_eq!(idx.byte_to_lsp(3), (0, 2)); // after 'a' + 'é' (1+1 utf16)
|
|
||||||
assert_eq!(idx.byte_to_lsp(7), (0, 4)); // after '🙂' (2 utf16) => 1+1+2=4
|
|
||||||
assert_eq!(idx.byte_to_lsp(8), (0, 5)); // after 'b'
|
|
||||||
|
|
||||||
// and inverse, specific columns
|
|
||||||
assert_eq!(idx.lsp_to_byte(0, 0), 0);
|
|
||||||
assert_eq!(idx.lsp_to_byte(0, 1), 1);
|
|
||||||
assert_eq!(idx.lsp_to_byte(0, 2), 3);
|
|
||||||
assert_eq!(idx.lsp_to_byte(0, 4), 7);
|
|
||||||
assert_eq!(idx.lsp_to_byte(0, 5), 8);
|
|
||||||
}
|
|
||||||
@ -1,12 +1,12 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "prometeu-compiler"
|
name = "prometeu-build-pipeline"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
repository.workspace = true
|
repository.workspace = true
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "prometeu-compiler"
|
name = "prometeu-build-pipeline"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
[package.metadata.dist]
|
[package.metadata.dist]
|
||||||
@ -15,9 +15,8 @@ include = ["../../VERSION.txt"]
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
prometeu-bytecode = { path = "../prometeu-bytecode" }
|
prometeu-bytecode = { path = "../prometeu-bytecode" }
|
||||||
prometeu-abi = { path = "../prometeu-abi" }
|
prometeu-core = { path = "../prometeu-core" }
|
||||||
prometeu-analysis = { path = "../prometeu-analysis" }
|
prometeu-language-api = { path = "../prometeu-language-api" }
|
||||||
language-api = { path = "../../language-api" }
|
|
||||||
clap = { version = "4.5.54", features = ["derive"] }
|
clap = { version = "4.5.54", features = ["derive"] }
|
||||||
serde = { version = "1.0.228", features = ["derive"] }
|
serde = { version = "1.0.228", features = ["derive"] }
|
||||||
serde_json = "1.0.149"
|
serde_json = "1.0.149"
|
||||||
88
crates/compiler/prometeu-build-pipeline/src/cli.rs
Normal file
88
crates/compiler/prometeu-build-pipeline/src/cli.rs
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
/// Command line interface for the Prometeu Compiler.
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(name = "prometeu-build-pipeline")]
|
||||||
|
#[command(version, about = "Official compiler for the PROMETEU Virtual Machine", long_about = None)]
|
||||||
|
pub struct Cli {
|
||||||
|
/// The action to perform (build or verify).
|
||||||
|
#[command(subcommand)]
|
||||||
|
pub command: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Available subcommands for the compiler.
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
pub enum Commands {
|
||||||
|
/// Builds a Prometeu project by compiling source code into a PBC file.
|
||||||
|
Build {
|
||||||
|
/// Path to the project root directory.
|
||||||
|
project_dir: PathBuf,
|
||||||
|
|
||||||
|
/// Explicit path to the entry file (defaults to src/main.pbs).
|
||||||
|
#[arg(short, long)]
|
||||||
|
entry: Option<PathBuf>,
|
||||||
|
|
||||||
|
/// Path to save the compiled .pbc file.
|
||||||
|
#[arg(short, long)]
|
||||||
|
out: Option<PathBuf>,
|
||||||
|
|
||||||
|
/// Whether to generate a .json symbols file for source mapping.
|
||||||
|
#[arg(long, default_value_t = true)]
|
||||||
|
emit_symbols: bool,
|
||||||
|
|
||||||
|
/// Disable symbol generation.
|
||||||
|
#[arg(long)]
|
||||||
|
no_symbols: bool,
|
||||||
|
|
||||||
|
/// Whether to generate a .disasm file for debugging.
|
||||||
|
#[arg(long, default_value_t = true)]
|
||||||
|
emit_disasm: bool,
|
||||||
|
|
||||||
|
/// Disable disassembly generation.
|
||||||
|
#[arg(long)]
|
||||||
|
no_disasm: bool,
|
||||||
|
|
||||||
|
/// Whether to explain the dependency resolution process.
|
||||||
|
#[arg(long)]
|
||||||
|
explain_deps: bool,
|
||||||
|
},
|
||||||
|
/// Verifies if a Prometeu project is syntactically and semantically valid without emitting code.
|
||||||
|
Verify {
|
||||||
|
/// Path to the project root directory.
|
||||||
|
project_dir: PathBuf,
|
||||||
|
|
||||||
|
/// Whether to explain the dependency resolution process.
|
||||||
|
#[arg(long)]
|
||||||
|
explain_deps: bool,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Main entry point for the compiler library's execution logic.
|
||||||
|
/// Parses CLI arguments and dispatches to the appropriate compiler functions.
|
||||||
|
pub fn run() -> Result<()> {
|
||||||
|
let cli = Cli::parse();
|
||||||
|
|
||||||
|
match cli.command {
|
||||||
|
Commands::Build {
|
||||||
|
project_dir,
|
||||||
|
out,
|
||||||
|
emit_disasm,
|
||||||
|
no_disasm,
|
||||||
|
emit_symbols,
|
||||||
|
no_symbols,
|
||||||
|
explain_deps,
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
|
||||||
|
}
|
||||||
|
Commands::Verify {
|
||||||
|
project_dir,
|
||||||
|
explain_deps
|
||||||
|
} => {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
3
crates/compiler/prometeu-build-pipeline/src/lib.rs
Normal file
3
crates/compiler/prometeu-build-pipeline/src/lib.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
mod cli;
|
||||||
|
|
||||||
|
pub use cli::run;
|
||||||
@ -3,5 +3,5 @@ use anyhow::Result;
|
|||||||
/// Main entry point for the Prometeu Compiler binary.
|
/// Main entry point for the Prometeu Compiler binary.
|
||||||
/// It delegates execution to the library's `run` function.
|
/// It delegates execution to the library's `run` function.
|
||||||
fn main() -> Result<()> {
|
fn main() -> Result<()> {
|
||||||
prometeu_compiler::run()
|
prometeu_build_pipeline::run()
|
||||||
}
|
}
|
||||||
@ -24,5 +24,8 @@ pub mod decoder;
|
|||||||
|
|
||||||
mod model;
|
mod model;
|
||||||
pub mod io;
|
pub mod io;
|
||||||
|
pub mod value;
|
||||||
|
pub mod program_image;
|
||||||
|
|
||||||
pub use model::*;
|
pub use model::*;
|
||||||
|
pub use value::Value;
|
||||||
|
|||||||
@ -1,8 +1,8 @@
|
|||||||
use prometeu_bytecode::abi::TrapInfo;
|
use crate::abi::TrapInfo;
|
||||||
use prometeu_bytecode::{BytecodeModule, ConstantPoolEntry, DebugInfo, Export, FunctionMeta};
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use crate::Value;
|
use crate::{BytecodeModule, ConstantPoolEntry, DebugInfo, Export, FunctionMeta};
|
||||||
|
use crate::value::Value;
|
||||||
|
|
||||||
/// Represents a fully linked, executable PBS program image.
|
/// Represents a fully linked, executable PBS program image.
|
||||||
///
|
///
|
||||||
@ -1,3 +0,0 @@
|
|||||||
pub mod symbols;
|
|
||||||
pub mod types;
|
|
||||||
pub mod project_registry;
|
|
||||||
@ -1,279 +0,0 @@
|
|||||||
use crate::common::diagnostics::{Diagnostic, Severity};
|
|
||||||
use crate::common::spans::{Span, FileId};
|
|
||||||
use crate::frontends::pbs::ast::AstArena;
|
|
||||||
use prometeu_analysis::NodeId;
|
|
||||||
use prometeu_analysis::{NameId, SymbolId, ModuleId};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
|
||||||
pub enum SymbolKind {
|
|
||||||
Type,
|
|
||||||
Value,
|
|
||||||
Service,
|
|
||||||
Function,
|
|
||||||
Struct,
|
|
||||||
Contract,
|
|
||||||
ErrorType,
|
|
||||||
Local,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
|
||||||
pub enum Namespace {
|
|
||||||
Type,
|
|
||||||
Value,
|
|
||||||
Service,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct Symbol {
|
|
||||||
pub name: NameId,
|
|
||||||
pub kind: SymbolKind,
|
|
||||||
pub exported: bool,
|
|
||||||
pub module: ModuleId,
|
|
||||||
pub decl_span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct SymbolArena {
|
|
||||||
pub symbols: Vec<Symbol>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
|
||||||
pub struct DefKey {
|
|
||||||
pub module: ModuleId,
|
|
||||||
pub name: NameId,
|
|
||||||
pub namespace: Namespace,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone)]
|
|
||||||
pub struct DefIndex {
|
|
||||||
symbols: HashMap<DefKey, SymbolId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct RefIndex {
|
|
||||||
refs: Vec<Vec<Span>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct NodeToSymbol {
|
|
||||||
map: Vec<Option<SymbolId>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SymbolArena {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { symbols: Vec::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert(&mut self, symbol: Symbol) -> SymbolId {
|
|
||||||
let id = SymbolId(self.symbols.len() as u32);
|
|
||||||
self.symbols.push(symbol);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, id: SymbolId) -> &Symbol {
|
|
||||||
&self.symbols[id.0 as usize]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DefIndex {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
symbols: HashMap::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_symbol(&mut self, key: DefKey, symbol_id: SymbolId) -> Result<(), Diagnostic> {
|
|
||||||
if self.symbols.contains_key(&key) {
|
|
||||||
return Err(Diagnostic {
|
|
||||||
severity: Severity::Error,
|
|
||||||
code: "E_RESOLVE_DUPLICATE_SYMBOL".to_string(),
|
|
||||||
message: "Duplicate symbol in the same module and namespace".to_string(),
|
|
||||||
// Placeholder span; callers should overwrite with accurate span when known.
|
|
||||||
span: Span::new(FileId(0), 0, 0),
|
|
||||||
related: Vec::new(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
self.symbols.insert(key, symbol_id);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, key: DefKey) -> Option<SymbolId> {
|
|
||||||
self.symbols.get(&key).copied()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Lookup by name/namespace ignoring module. Returns the first match with its module id.
|
|
||||||
pub fn get_by_name_any_module(&self, name: NameId, namespace: Namespace) -> Option<(ModuleId, SymbolId)> {
|
|
||||||
for (k, v) in &self.symbols {
|
|
||||||
if k.name == name && k.namespace == namespace {
|
|
||||||
return Some((k.module, *v));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RefIndex {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { refs: Vec::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ensure_symbol(&mut self, symbol_id: SymbolId) {
|
|
||||||
let index = symbol_id.0 as usize;
|
|
||||||
if index >= self.refs.len() {
|
|
||||||
self.refs.resize_with(index + 1, Vec::new);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn record_ref(&mut self, symbol_id: SymbolId, span: Span) {
|
|
||||||
self.ensure_symbol(symbol_id);
|
|
||||||
self.refs[symbol_id.0 as usize].push(span);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn refs_of(&self, symbol_id: SymbolId) -> &[Span] {
|
|
||||||
const EMPTY: [Span; 0] = [];
|
|
||||||
self.refs
|
|
||||||
.get(symbol_id.0 as usize)
|
|
||||||
.map(|refs| refs.as_slice())
|
|
||||||
.unwrap_or(&EMPTY)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NodeToSymbol {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { map: Vec::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn bind_node(&mut self, node_id: NodeId, symbol_id: SymbolId) {
|
|
||||||
self.ensure(node_id);
|
|
||||||
self.map[node_id.0 as usize] = Some(symbol_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, node_id: NodeId) -> Option<SymbolId> {
|
|
||||||
self.map.get(node_id.0 as usize).and_then(|opt| *opt)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ensure(&mut self, node_id: NodeId) {
|
|
||||||
let index = node_id.0 as usize;
|
|
||||||
if index >= self.map.len() {
|
|
||||||
self.map.resize(index + 1, None);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resize_to_fit(&mut self, arena: &AstArena) {
|
|
||||||
self.map.resize(arena.nodes.len(), None);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
fn sample_symbol(name: NameId, module: ModuleId) -> Symbol {
|
|
||||||
Symbol {
|
|
||||||
name,
|
|
||||||
kind: SymbolKind::Function,
|
|
||||||
exported: false,
|
|
||||||
module,
|
|
||||||
decl_span: Span::new(FileId(0), 1, 2),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn insert_returns_incremental_ids() {
|
|
||||||
let mut arena = SymbolArena::new();
|
|
||||||
let id0 = arena.insert(sample_symbol(NameId(0), ModuleId(0)));
|
|
||||||
let id1 = arena.insert(sample_symbol(NameId(1), ModuleId(0)));
|
|
||||||
|
|
||||||
assert_eq!(id0, SymbolId(0));
|
|
||||||
assert_eq!(id1, SymbolId(1));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn get_returns_correct_symbol() {
|
|
||||||
let mut arena = SymbolArena::new();
|
|
||||||
let symbol = sample_symbol(NameId(7), ModuleId(3));
|
|
||||||
let id = arena.insert(symbol.clone());
|
|
||||||
|
|
||||||
assert_eq!(arena.get(id).name, symbol.name);
|
|
||||||
assert_eq!(arena.get(id).kind, symbol.kind);
|
|
||||||
assert_eq!(arena.get(id).module, symbol.module);
|
|
||||||
assert_eq!(arena.get(id).decl_span, symbol.decl_span);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn def_index_duplicate_in_same_namespace_errors() {
|
|
||||||
let mut index = DefIndex::new();
|
|
||||||
let key = DefKey {
|
|
||||||
module: ModuleId(1),
|
|
||||||
name: NameId(10),
|
|
||||||
namespace: Namespace::Type,
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(index.insert_symbol(key, SymbolId(0)).is_ok());
|
|
||||||
let err = index.insert_symbol(key, SymbolId(1)).unwrap_err();
|
|
||||||
|
|
||||||
assert_eq!(err.code, "E_RESOLVE_DUPLICATE_SYMBOL");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn def_index_allows_same_name_in_different_namespace() {
|
|
||||||
let mut index = DefIndex::new();
|
|
||||||
let name = NameId(11);
|
|
||||||
let type_key = DefKey {
|
|
||||||
module: ModuleId(2),
|
|
||||||
name,
|
|
||||||
namespace: Namespace::Type,
|
|
||||||
};
|
|
||||||
let value_key = DefKey {
|
|
||||||
module: ModuleId(2),
|
|
||||||
name,
|
|
||||||
namespace: Namespace::Value,
|
|
||||||
};
|
|
||||||
|
|
||||||
assert!(index.insert_symbol(type_key, SymbolId(0)).is_ok());
|
|
||||||
assert!(index.insert_symbol(value_key, SymbolId(1)).is_ok());
|
|
||||||
assert_eq!(index.get(type_key), Some(SymbolId(0)));
|
|
||||||
assert_eq!(index.get(value_key), Some(SymbolId(1)));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn ref_index_records_refs_per_symbol() {
|
|
||||||
let mut index = RefIndex::new();
|
|
||||||
let span_a1 = Span::new(FileId(0), 1, 2);
|
|
||||||
let span_a2 = Span::new(FileId(0), 3, 4);
|
|
||||||
let span_b1 = Span::new(FileId(1), 10, 12);
|
|
||||||
|
|
||||||
index.record_ref(SymbolId(2), span_a1.clone());
|
|
||||||
index.record_ref(SymbolId(2), span_a2.clone());
|
|
||||||
index.record_ref(SymbolId(5), span_b1.clone());
|
|
||||||
|
|
||||||
assert_eq!(index.refs_of(SymbolId(2)), &[span_a1.clone(), span_a2.clone()]);
|
|
||||||
assert_eq!(index.refs_of(SymbolId(5)), &[span_b1.clone()]);
|
|
||||||
assert!(index.refs_of(SymbolId(9)).is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn node_to_symbol_bind_and_get() {
|
|
||||||
let mut map = NodeToSymbol::new();
|
|
||||||
let nid = NodeId(10);
|
|
||||||
let sid = SymbolId(5);
|
|
||||||
|
|
||||||
map.bind_node(nid, sid);
|
|
||||||
assert_eq!(map.get(nid), Some(sid));
|
|
||||||
assert_eq!(map.get(NodeId(0)), None);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
fn node_to_symbol_expands_automatically() {
    // Binding a node id past the current capacity grows the backing table
    // on demand instead of panicking.
    let mut bindings = NodeToSymbol::new();
    let far_node = NodeId(100);
    let symbol = SymbolId(1);

    bindings.bind_node(far_node, symbol);
    assert_eq!(bindings.get(far_node), Some(symbol));
    assert!(bindings.map.len() > 100);
}
|
|
||||||
}
|
|
||||||
@ -1,235 +0,0 @@
|
|||||||
use crate::analysis::symbols::{SymbolArena};
|
|
||||||
use prometeu_analysis::{NameId, NameInterner, TypeId, SymbolId, NodeId};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
// Use canonical TypeId from prometeu-analysis
|
|
||||||
|
|
||||||
/// Structural description of one type stored in a [`TypeArena`].
///
/// Composite variants reference other arena entries by `TypeId`;
/// `Struct` points at the declaring symbol instead.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum TypeKind {
    /// Built-in type identified by its interned name (e.g. "int", "bool").
    Primitive { name: NameId },
    /// `optional<inner>`.
    Optional { inner: TypeId },
    /// `result<ok, err>`.
    Result { ok: TypeId, err: TypeId },
    /// `array<inner>`, with an optional fixed length.
    Array { inner: TypeId, len: Option<u32> },
    /// Nominal struct type, identified by its declaring symbol.
    Struct { sym: SymbolId },
}
|
|
||||||
|
|
||||||
/// Append-only arena of [`TypeKind`]s; a `TypeId` is an index into `types`.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct TypeArena {
    // Backing storage; entries are never removed or mutated after insertion.
    pub types: Vec<TypeKind>,
}
|
|
||||||
|
|
||||||
impl TypeArena {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { types: Vec::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Interna um tipo na arena. Atualmente apenas adiciona ao final (append-only),
|
|
||||||
/// sem realizar de-duplicação.
|
|
||||||
pub fn intern_type(&mut self, kind: TypeKind) -> TypeId {
|
|
||||||
let id = TypeId(self.types.len() as u32);
|
|
||||||
self.types.push(kind);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn kind(&self, id: TypeId) -> &TypeKind {
|
|
||||||
&self.types[id.0 as usize]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn format_type(
|
|
||||||
type_id: TypeId,
|
|
||||||
arena: &TypeArena,
|
|
||||||
interner: &NameInterner,
|
|
||||||
symbols: Option<&SymbolArena>,
|
|
||||||
) -> String {
|
|
||||||
let kind = arena.kind(type_id);
|
|
||||||
match kind {
|
|
||||||
TypeKind::Primitive { name } => interner.resolve(*name).to_string(),
|
|
||||||
TypeKind::Optional { inner } => {
|
|
||||||
let inner_str = format_type(*inner, arena, interner, symbols);
|
|
||||||
format!("optional<{}>", inner_str)
|
|
||||||
}
|
|
||||||
TypeKind::Result { ok, err } => {
|
|
||||||
let ok_str = format_type(*ok, arena, interner, symbols);
|
|
||||||
let err_str = format_type(*err, arena, interner, symbols);
|
|
||||||
format!("result<{}, {}>", ok_str, err_str)
|
|
||||||
}
|
|
||||||
TypeKind::Array { inner, len } => {
|
|
||||||
let inner_str = format_type(*inner, arena, interner, symbols);
|
|
||||||
if let Some(n) = len {
|
|
||||||
format!("array<{}>[{}]", inner_str, n)
|
|
||||||
} else {
|
|
||||||
format!("array<{}>", inner_str)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
TypeKind::Struct { sym } => {
|
|
||||||
if let Some(symbol_arena) = symbols {
|
|
||||||
let symbol = symbol_arena.get(*sym);
|
|
||||||
interner.resolve(symbol.name).to_string()
|
|
||||||
} else {
|
|
||||||
format!("struct#{}", sym.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Inferred type information, densely indexed by node and symbol IDs.
/// Vectors grow on demand; `None` means "no type recorded yet".
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct TypeFacts {
    // node_type[node_id.0] — recorded type of a node, if any.
    pub node_type: Vec<Option<TypeId>>,
    // symbol_type[symbol_id.0] — recorded type of a symbol, if any.
    pub symbol_type: Vec<Option<TypeId>>,
}
|
|
||||||
|
|
||||||
impl TypeFacts {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_node_type(&mut self, node_id: NodeId, type_id: TypeId) {
|
|
||||||
let idx = node_id.0 as usize;
|
|
||||||
if idx >= self.node_type.len() {
|
|
||||||
self.node_type.resize_with(idx + 1, || None);
|
|
||||||
}
|
|
||||||
self.node_type[idx] = Some(type_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_node_type(&self, node_id: NodeId) -> Option<TypeId> {
|
|
||||||
self.node_type.get(node_id.0 as usize).and_then(|t| *t)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_symbol_type(&mut self, symbol_id: SymbolId, type_id: TypeId) {
|
|
||||||
let idx = symbol_id.0 as usize;
|
|
||||||
if idx >= self.symbol_type.len() {
|
|
||||||
self.symbol_type.resize_with(idx + 1, || None);
|
|
||||||
}
|
|
||||||
self.symbol_type[idx] = Some(type_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_symbol_type(&self, symbol_id: SymbolId) -> Option<TypeId> {
|
|
||||||
self.symbol_type.get(symbol_id.0 as usize).and_then(|t| *t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use prometeu_analysis::{FileId, ModuleId};
    use super::*;
    // Mock NameId and SymbolId for simplified tests if necessary,
    // or use real values.

    // intern_type appends and hands out sequential ids starting at 0.
    #[test]
    fn type_arena_push_is_append_only() {
        let mut arena = TypeArena::new();
        let name = NameId(0);
        let t1 = arena.intern_type(TypeKind::Primitive { name });
        let t2 = arena.intern_type(TypeKind::Optional { inner: t1 });

        assert_eq!(t1.0, 0);
        assert_eq!(t2.0, 1);
        assert_eq!(arena.types.len(), 2);
    }

    // Previously interned ids keep resolving to the same kinds as the arena grows.
    #[test]
    fn type_arena_index_is_stable() {
        let mut arena = TypeArena::new();
        let name = NameId(0);
        let t1 = arena.intern_type(TypeKind::Primitive { name });
        let t2 = arena.intern_type(TypeKind::Optional { inner: t1 });

        assert!(matches!(arena.kind(t1), TypeKind::Primitive { .. }));
        assert!(matches!(arena.kind(t2), TypeKind::Optional { .. }));

        // Adding more types should not change the previous ones
        let t3 = arena.intern_type(TypeKind::Array { inner: t1, len: None });
        assert!(matches!(arena.kind(t1), TypeKind::Primitive { .. }));
        assert!(matches!(arena.kind(t2), TypeKind::Optional { .. }));
        assert!(matches!(arena.kind(t3), TypeKind::Array { .. }));
    }

    // Setting a type at a high node id grows the node_type table on demand.
    #[test]
    fn type_facts_auto_grows_for_node_ids() {
        let mut facts = TypeFacts::new();
        let nid = NodeId(10);
        let tid = TypeId(1);

        assert_eq!(facts.get_node_type(nid), None);
        facts.set_node_type(nid, tid);
        assert_eq!(facts.get_node_type(nid), Some(tid));
        assert!(facts.node_type.len() > 10);
    }

    // optional<T> renders as "optional<...>" around its inner type.
    #[test]
    fn format_type_optional() {
        let mut arena = TypeArena::new();
        let mut interner = NameInterner::new();
        let int_name = interner.intern("int");
        let int_t = arena.intern_type(TypeKind::Primitive { name: int_name });
        let opt_t = arena.intern_type(TypeKind::Optional { inner: int_t });

        let formatted = format_type(opt_t, &arena, &interner, None);
        assert_eq!(formatted, "optional<int>");
    }

    // result<Ok, Err> renders both payload types, comma-separated.
    #[test]
    fn format_type_result() {
        let mut arena = TypeArena::new();
        let mut interner = NameInterner::new();
        let int_name = interner.intern("int");
        let string_name = interner.intern("string");
        let int_t = arena.intern_type(TypeKind::Primitive { name: int_name });
        let string_t = arena.intern_type(TypeKind::Primitive { name: string_name });
        let res_t = arena.intern_type(TypeKind::Result { ok: int_t, err: string_t });

        let formatted = format_type(res_t, &arena, &interner, None);
        assert_eq!(formatted, "result<int, string>");
    }

    // Arrays render with a trailing "[len]" suffix only when a fixed length exists.
    #[test]
    fn format_type_array_len() {
        let mut arena = TypeArena::new();
        let mut interner = NameInterner::new();
        let bool_name = interner.intern("bool");
        let bool_t = arena.intern_type(TypeKind::Primitive { name: bool_name });

        let arr_dynamic = arena.intern_type(TypeKind::Array { inner: bool_t, len: None });
        let arr_fixed = arena.intern_type(TypeKind::Array { inner: bool_t, len: Some(10) });

        assert_eq!(format_type(arr_dynamic, &arena, &interner, None), "array<bool>");
        assert_eq!(format_type(arr_fixed, &arena, &interner, None), "array<bool>[10]");
    }

    // With a SymbolArena available, struct types render by their declared name.
    #[test]
    fn format_type_struct_with_arena() {
        use crate::analysis::symbols::{Symbol, SymbolKind};
        use crate::common::spans::Span;

        let mut arena = TypeArena::new();
        let mut interner = NameInterner::new();
        let mut symbols = SymbolArena::new();

        let my_struct_name = interner.intern("MyStruct");
        let sym_id = symbols.insert(Symbol {
            name: my_struct_name,
            kind: SymbolKind::Struct,
            exported: false,
            module: ModuleId(0),
            decl_span: Span::new(FileId(0), 0, 0),
        });

        let struct_t = arena.intern_type(TypeKind::Struct { sym: sym_id });

        let formatted = format_type(struct_t, &arena, &interner, Some(&symbols));
        assert_eq!(formatted, "MyStruct");
    }

    // Without a SymbolArena, struct types fall back to the opaque "struct#<id>" form.
    #[test]
    fn format_type_struct_fallback() {
        let mut arena = TypeArena::new();
        let interner = NameInterner::new();
        let sym_id = SymbolId(42);

        let struct_t = arena.intern_type(TypeKind::Struct { sym: sym_id });

        let formatted = format_type(struct_t, &arena, &interner, None);
        assert_eq!(formatted, "struct#42");
    }
}
|
|
||||||
@ -1,158 +0,0 @@
|
|||||||
use crate::common::symbols::{
|
|
||||||
AnalysisFileTableEntry, AnalysisFileV0, AnalysisModuleEntry, AnalysisNameEntry, AnalysisSymbolEntry,
|
|
||||||
DebugSymbol, SymbolsFile,
|
|
||||||
};
|
|
||||||
use anyhow::{Context, Result};
|
|
||||||
use prometeu_bytecode::disasm::disasm;
|
|
||||||
use prometeu_bytecode::BytecodeLoader;
|
|
||||||
use std::fs;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
/// Final build outputs produced by the pipeline, ready to be written to disk.
pub struct Artifacts {
    // Serialized PBC binary image.
    pub rom: Vec<u8>,
    // PC -> source location entries, used to annotate the disassembly listing.
    pub debug_symbols: Vec<DebugSymbol>,
    // Symbol data exported as JSON for LSP consumers.
    pub lsp_symbols: SymbolsFile,
}
|
|
||||||
|
|
||||||
impl Artifacts {
    /// Bundles the build outputs into a single value.
    pub fn new(rom: Vec<u8>, debug_symbols: Vec<DebugSymbol>, lsp_symbols: SymbolsFile) -> Self {
        Self { rom, debug_symbols, lsp_symbols }
    }

    /// Writes the artifacts to disk.
    ///
    /// Always writes the PBC binary to `out`. When `emit_symbols` is set, also
    /// writes `symbols.json` and `analysis.json` as siblings of `out`. When
    /// `emit_disasm` is set, writes an annotated disassembly listing to `out`
    /// with its extension replaced by `disasm.txt`.
    ///
    /// # Errors
    /// Returns an error if any file write, JSON serialization, or the
    /// disassembly step fails.
    pub fn export(&self, out: &Path, emit_disasm: bool, emit_symbols: bool) -> Result<()> {
        // 1. Save the main binary
        fs::write(out, &self.rom).with_context(|| format!("Failed to write PBC to {:?}", out))?;

        // 2. Export symbols for LSP
        if emit_symbols {
            let symbols_path = out.with_file_name("symbols.json");
            let symbols_json = serde_json::to_string_pretty(&self.lsp_symbols)?;
            fs::write(&symbols_path, symbols_json)?;

            // Also export analysis.json v0
            let analysis = build_analysis_v0(&self.lsp_symbols);
            let analysis_path = out.with_file_name("analysis.json");
            let analysis_json = serde_json::to_string_pretty(&analysis)?;
            fs::write(&analysis_path, analysis_json)?;
        }

        // 3. Export human-readable disassembly for developer inspection
        if emit_disasm {
            let disasm_path = out.with_extension("disasm.txt");

            // Extract the actual bytecode (stripping the industrial PBS\0 header).
            // If the ROM does not parse as a module, fall back to disassembling
            // the raw bytes as-is (best effort).
            let rom_to_disasm = if let Ok(module) = BytecodeLoader::load(&self.rom) {
                module.code
            } else {
                self.rom.clone()
            };

            let instructions = disasm(&rom_to_disasm).map_err(|e| anyhow::anyhow!("Disassembly failed: {}", e))?;

            let mut disasm_text = String::new();
            for instr in instructions {
                // Find a matching symbol to show which source line generated this instruction
                let symbol = self.debug_symbols.iter().find(|s| s.pc == instr.pc);
                let comment = if let Some(s) = symbol {
                    format!(" ; {}:{}", s.file, s.line)
                } else {
                    "".to_string()
                };

                let operands_str = instr.operands.iter()
                    .map(|o| format!("{:?}", o))
                    .collect::<Vec<_>>()
                    .join(" ");

                // One line per instruction: "<pc-hex> <opcode> <operands> [; file:line]"
                disasm_text.push_str(&format!("{:08X} {:?} {}{}\n", instr.pc, instr.opcode, operands_str, comment));
            }
            fs::write(disasm_path, disasm_text)?;
        }

        Ok(())
    }
}
|
|
||||||
|
|
||||||
fn build_analysis_v0(symbols_file: &SymbolsFile) -> AnalysisFileV0 {
|
|
||||||
// Gather uniques with deterministic ordering
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
let mut file_ids: BTreeMap<String, u32> = BTreeMap::new();
|
|
||||||
let mut name_ids: BTreeMap<String, u32> = BTreeMap::new();
|
|
||||||
let mut module_ids: BTreeMap<String, u32> = BTreeMap::new();
|
|
||||||
|
|
||||||
// First pass: collect all unique entries
|
|
||||||
for project in &symbols_file.projects {
|
|
||||||
for s in &project.symbols {
|
|
||||||
file_ids.entry(s.decl_span.file_uri.clone()).or_insert(0);
|
|
||||||
name_ids.entry(s.name.clone()).or_insert(0);
|
|
||||||
module_ids.entry(s.module_path.clone()).or_insert(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Assign incremental IDs in key order (deterministic)
|
|
||||||
let mut next = 0u32;
|
|
||||||
for v in file_ids.values_mut() { *v = next; next += 1; }
|
|
||||||
next = 0; for v in name_ids.values_mut() { *v = next; next += 1; }
|
|
||||||
next = 0; for v in module_ids.values_mut() { *v = next; next += 1; }
|
|
||||||
|
|
||||||
// Build tables
|
|
||||||
let mut file_table = Vec::with_capacity(file_ids.len());
|
|
||||||
for (uri, file_id) in &file_ids {
|
|
||||||
file_table.push(AnalysisFileTableEntry { file_id: *file_id, uri: uri.clone() });
|
|
||||||
}
|
|
||||||
let mut name_table = Vec::with_capacity(name_ids.len());
|
|
||||||
for (name, name_id) in &name_ids {
|
|
||||||
name_table.push(AnalysisNameEntry { name_id: *name_id, name: name.clone() });
|
|
||||||
}
|
|
||||||
let mut module_table = Vec::with_capacity(module_ids.len());
|
|
||||||
for (module_path, module_id) in &module_ids {
|
|
||||||
module_table.push(AnalysisModuleEntry { module_id: *module_id, module_path: module_path.clone() });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Second pass: assign symbol ids in a deterministic global order
|
|
||||||
let mut flat_symbols: Vec<(&str, &crate::common::symbols::Symbol)> = Vec::new();
|
|
||||||
for project in &symbols_file.projects {
|
|
||||||
for s in &project.symbols {
|
|
||||||
flat_symbols.push((project.project.as_str(), s));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Deterministic order: by file, start line/col, then module, name, kind
|
|
||||||
flat_symbols.sort_by(|a, b| {
|
|
||||||
let sa = a.1; let sb = b.1;
|
|
||||||
sa.decl_span.file_uri.cmp(&sb.decl_span.file_uri)
|
|
||||||
.then(sa.decl_span.start.line.cmp(&sb.decl_span.start.line))
|
|
||||||
.then(sa.decl_span.start.col.cmp(&sb.decl_span.start.col))
|
|
||||||
.then(sa.module_path.cmp(&sb.module_path))
|
|
||||||
.then(sa.name.cmp(&sb.name))
|
|
||||||
.then(sa.kind.cmp(&sb.kind))
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut symbols = Vec::with_capacity(flat_symbols.len());
|
|
||||||
for (i, (_proj, s)) in flat_symbols.iter().enumerate() {
|
|
||||||
let name_id = *name_ids.get(&s.name).unwrap();
|
|
||||||
let module_id = *module_ids.get(&s.module_path).unwrap();
|
|
||||||
let _file_id = *file_ids.get(&s.decl_span.file_uri).unwrap();
|
|
||||||
symbols.push(AnalysisSymbolEntry {
|
|
||||||
symbol_id: i as u32,
|
|
||||||
name_id,
|
|
||||||
kind: s.kind.clone(),
|
|
||||||
exported: s.exported,
|
|
||||||
module_id,
|
|
||||||
decl_span: s.decl_span.clone(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
AnalysisFileV0 {
|
|
||||||
schema_version: 0,
|
|
||||||
compiler_version: symbols_file.compiler_version.clone(),
|
|
||||||
root_project: symbols_file.root_project.clone(),
|
|
||||||
file_table,
|
|
||||||
name_table,
|
|
||||||
module_table,
|
|
||||||
symbols,
|
|
||||||
refs: Vec::new(),
|
|
||||||
types: Vec::new(),
|
|
||||||
facts: Default::default(),
|
|
||||||
diagnostics: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,542 +0,0 @@
|
|||||||
//! # Bytecode Emitter
|
|
||||||
//!
|
|
||||||
//! This module is responsible for the final stage of the compilation process:
|
|
||||||
//! converting the Intermediate Representation (IR) into the binary Prometeu ByteCode (PBC) format.
|
|
||||||
//!
|
|
||||||
//! It performs two main tasks:
|
|
||||||
//! 1. **Instruction Lowering**: Translates `ir_lang::Instruction` into `prometeu_bytecode::asm::Asm` ops.
|
|
||||||
//! 2. **DebugSymbol Mapping**: Associates bytecode offsets (Program Counter) with source code locations.
|
|
||||||
|
|
||||||
use crate::ir_core::ConstantValue;
|
|
||||||
use crate::ir_lang;
|
|
||||||
use crate::ir_lang::instr::InstrKind;
|
|
||||||
use anyhow::{anyhow, Result};
|
|
||||||
use prometeu_bytecode::abi::SourceSpan;
|
|
||||||
use prometeu_bytecode::asm::{update_pc_by_operand, Asm, Operand};
|
|
||||||
use prometeu_bytecode::opcode::OpCode;
|
|
||||||
use prometeu_bytecode::{BytecodeModule, ConstantPoolEntry, DebugInfo, FunctionMeta};
|
|
||||||
|
|
||||||
/// The final output of the code generation phase.
|
|
||||||
/// The final output of the code generation phase.
pub struct EmitResult {
    /// The serialized binary data of the PBC file
    /// (produced by `BytecodeModule::serialize` in `emit_module`).
    pub rom: Vec<u8>,
}
|
|
||||||
|
|
||||||
/// Intermediate emission pieces, before being wrapped into a `BytecodeModule`.
pub struct EmitFragments {
    // De-duplicated constant pool (index 0 is reserved for Null).
    pub const_pool: Vec<ConstantPoolEntry>,
    // Per-function metadata (code offset/length, slot counts).
    pub functions: Vec<FunctionMeta>,
    // Assembled bytecode stream.
    pub code: Vec<u8>,
    // Optional PC-to-span and function-name debug tables.
    pub debug_info: Option<DebugInfo>,
    // Labels the assembler could not resolve, keyed by label name.
    // NOTE(review): the u32s are presumably the code offsets referencing
    // each label — confirm against `assemble_with_unresolved`.
    pub unresolved_labels: std::collections::HashMap<String, Vec<u32>>,
}
|
|
||||||
|
|
||||||
/// Entry point for emitting a bytecode module from the IR.
|
|
||||||
pub fn emit_module(module: &ir_lang::Module) -> Result<EmitResult> {
|
|
||||||
let fragments = emit_fragments(module)?;
|
|
||||||
|
|
||||||
let exports: Vec<_> = module.functions.iter().enumerate().map(|(i, f)| {
|
|
||||||
prometeu_bytecode::Export {
|
|
||||||
symbol: f.name.clone(),
|
|
||||||
func_idx: i as u32,
|
|
||||||
}
|
|
||||||
}).collect();
|
|
||||||
|
|
||||||
let bytecode_module = BytecodeModule {
|
|
||||||
version: 0,
|
|
||||||
const_pool: fragments.const_pool,
|
|
||||||
functions: fragments.functions,
|
|
||||||
code: fragments.code,
|
|
||||||
debug_info: fragments.debug_info,
|
|
||||||
exports,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(EmitResult {
|
|
||||||
rom: bytecode_module.serialize(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Lowers the IR module into assembled bytecode plus the metadata needed to
/// build a `BytecodeModule`: per-function ranges, a sorted/deduped PC-to-span
/// debug table, and any labels left unresolved by the assembler.
pub fn emit_fragments(module: &ir_lang::Module) -> Result<EmitFragments> {
    let mut emitter = BytecodeEmitter::new();

    let mut asm_instrs = Vec::new();
    let mut ir_instr_map = Vec::new();
    let function_ranges = emitter.lower_instrs(module, &mut asm_instrs, &mut ir_instr_map)?;

    // PCs are computed per asm entry so that ranges/spans can be expressed in
    // byte offsets after assembly.
    let pcs = BytecodeEmitter::calculate_pcs(&asm_instrs);
    let assemble_res = prometeu_bytecode::asm::assemble_with_unresolved(&asm_instrs).map_err(|e| anyhow!(e))?;
    let bytecode = assemble_res.code;

    let mut functions = Vec::new();
    let mut function_names = Vec::new();
    for (i, function) in module.functions.iter().enumerate() {
        let (start_idx, last_op_idx) = function_ranges[i];
        let start_pc = pcs[start_idx];
        // `last_op_idx` points at the last Asm::Op belonging to the function. The canonical
        // end PC is the PC of the next entry in `pcs` (exclusive). Trailing labels do not
        // advance the PC.
        let end_pc = if (last_op_idx + 1) < pcs.len() { pcs[last_op_idx + 1] } else { bytecode.len() as u32 };

        // Enriched name for tooling/analysis: "name@offset+len"
        let enriched_name = format!("{}@{}+{}", function.name, start_pc, end_pc - start_pc);

        functions.push(FunctionMeta {
            code_offset: start_pc,
            code_len: end_pc - start_pc,
            param_slots: function.param_slots,
            local_slots: function.local_slots,
            return_slots: function.return_slots,
            max_stack_slots: 0, // Will be filled by verifier
        });
        function_names.push((i as u32, enriched_name));
    }

    // Build the PC -> source span table from the IR instructions that carried spans.
    let mut pc_to_span = Vec::new();
    for (i, instr_opt) in ir_instr_map.iter().enumerate() {
        let current_pc = pcs[i];
        if let Some(instr) = instr_opt {
            if let Some(span) = &instr.span {
                pc_to_span.push((current_pc, SourceSpan {
                    file_id: span.file.as_u32(),
                    start: span.start,
                    end: span.end,
                }));
            }
        }
    }
    // Keep the table sorted and one entry per PC (first span wins after sort).
    pc_to_span.sort_by_key(|(pc, _)| *pc);
    pc_to_span.dedup_by_key(|(pc, _)| *pc);

    Ok(EmitFragments {
        const_pool: emitter.constant_pool,
        functions,
        code: bytecode,
        debug_info: Some(DebugInfo {
            pc_to_span,
            function_names,
        }),
        unresolved_labels: assemble_res.unresolved_labels,
    })
}
|
|
||||||
|
|
||||||
/// Internal helper for managing the bytecode emission state.
|
|
||||||
/// Internal helper for managing the bytecode emission state.
struct BytecodeEmitter {
    /// Stores constant values (like strings) that are referenced by instructions.
    /// Index 0 is pre-seeded with `Null` by the constructor.
    constant_pool: Vec<ConstantPoolEntry>,
}
|
|
||||||
|
|
||||||
impl BytecodeEmitter {
|
|
||||||
fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
// Index 0 is traditionally reserved for Null in many VMs
|
|
||||||
constant_pool: vec![ConstantPoolEntry::Null],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Adds a value to the constant pool if it doesn't exist, returning its unique index.
|
|
||||||
fn add_constant(&mut self, entry: ConstantPoolEntry) -> u32 {
|
|
||||||
if let Some(pos) = self.constant_pool.iter().position(|e| e == &entry) {
|
|
||||||
pos as u32
|
|
||||||
} else {
|
|
||||||
let id = self.constant_pool.len() as u32;
|
|
||||||
self.constant_pool.push(entry);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts an IR-level constant into its pool representation and interns it,
/// returning the (de-duplicated) pool index via `add_constant`.
fn add_ir_constant(&mut self, val: &ConstantValue) -> u32 {
    let entry = match val {
        ConstantValue::Int(v) => ConstantPoolEntry::Int64(*v),
        ConstantValue::Float(v) => ConstantPoolEntry::Float64(*v),
        ConstantValue::String(s) => ConstantPoolEntry::String(s.clone()),
    };
    self.add_constant(entry)
}
|
|
||||||
|
|
||||||
fn lower_instrs<'b>(
|
|
||||||
&mut self,
|
|
||||||
module: &'b ir_lang::Module,
|
|
||||||
asm_instrs: &mut Vec<Asm>,
|
|
||||||
ir_instr_map: &mut Vec<Option<&'b ir_lang::Instruction>>
|
|
||||||
) -> Result<Vec<(usize, usize)>> {
|
|
||||||
// Cache to map VM IR const ids to emitted constant pool ids
|
|
||||||
let mut const_id_map: std::collections::HashMap<ir_lang::types::ConstId, u32> = std::collections::HashMap::new();
|
|
||||||
// Build a mapping from VM function id -> index into module.functions
|
|
||||||
let mut id_to_index = std::collections::HashMap::new();
|
|
||||||
let mut func_names = std::collections::HashMap::new();
|
|
||||||
for (idx, func) in module.functions.iter().enumerate() {
|
|
||||||
id_to_index.insert(func.id, idx as u32);
|
|
||||||
func_names.insert(func.id, func.name.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut ranges = Vec::new();
|
|
||||||
|
|
||||||
for function in &module.functions {
|
|
||||||
let start_idx = asm_instrs.len();
|
|
||||||
// Each function starts with a label for its entry point.
|
|
||||||
asm_instrs.push(Asm::Label(function.name.clone()));
|
|
||||||
ir_instr_map.push(None);
|
|
||||||
// Track an approximate stack height for this function
|
|
||||||
let mut stack_height: i32 = 0;
|
|
||||||
// Nome canônico para o label de término desta função
|
|
||||||
let end_label = format!("{}::__end", function.name);
|
|
||||||
// Track last opcode index for this function (to exclude trailing padding/labels)
|
|
||||||
let mut last_op_idx_in_func: Option<usize> = None;
|
|
||||||
|
|
||||||
for instr in &function.body {
|
|
||||||
let op_start_idx = asm_instrs.len();
|
|
||||||
|
|
||||||
// Translate each IR instruction to its equivalent Bytecode OpCode.
|
|
||||||
match &instr.kind {
|
|
||||||
InstrKind::Nop => asm_instrs.push(Asm::Op(OpCode::Nop, vec![])),
|
|
||||||
InstrKind::Halt => asm_instrs.push(Asm::Op(OpCode::Halt, vec![])),
|
|
||||||
InstrKind::PushConst(id) => {
|
|
||||||
// Map VM const id to emitted const pool id on-demand
|
|
||||||
let mapped_id = if let Some(mid) = const_id_map.get(id) {
|
|
||||||
*mid
|
|
||||||
} else {
|
|
||||||
let idx = id.0 as usize;
|
|
||||||
let val = module
|
|
||||||
.const_pool
|
|
||||||
.constants
|
|
||||||
.get(idx)
|
|
||||||
.ok_or_else(|| anyhow!("Invalid const id {} (pool len {})", id.0, module.const_pool.constants.len()))?;
|
|
||||||
let mid = self.add_ir_constant(val);
|
|
||||||
const_id_map.insert(*id, mid);
|
|
||||||
mid
|
|
||||||
};
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::PushConst, vec![Operand::U32(mapped_id)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::PushBounded(val) => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::PushBounded, vec![Operand::U32(*val)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::PushBool(v) => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::PushBool, vec![Operand::Bool(*v)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::PushNull => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::PushConst, vec![Operand::U32(0)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::Pop => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Pop, vec![]));
|
|
||||||
stack_height = (stack_height - 1).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::Dup => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Dup, vec![]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::Swap => asm_instrs.push(Asm::Op(OpCode::Swap, vec![])),
|
|
||||||
InstrKind::Add => { asm_instrs.push(Asm::Op(OpCode::Add, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Sub => { asm_instrs.push(Asm::Op(OpCode::Sub, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Mul => { asm_instrs.push(Asm::Op(OpCode::Mul, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Div => { asm_instrs.push(Asm::Op(OpCode::Div, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Neg => { asm_instrs.push(Asm::Op(OpCode::Neg, vec![])); /* unary */ }
|
|
||||||
InstrKind::Eq => { asm_instrs.push(Asm::Op(OpCode::Eq, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Neq => { asm_instrs.push(Asm::Op(OpCode::Neq, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Lt => { asm_instrs.push(Asm::Op(OpCode::Lt, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Gt => { asm_instrs.push(Asm::Op(OpCode::Gt, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Lte => { asm_instrs.push(Asm::Op(OpCode::Lte, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::Gte => { asm_instrs.push(Asm::Op(OpCode::Gte, vec![])); stack_height = (stack_height - 1).max(0); }
|
|
||||||
InstrKind::And => asm_instrs.push(Asm::Op(OpCode::And, vec![])),
|
|
||||||
InstrKind::Or => asm_instrs.push(Asm::Op(OpCode::Or, vec![])),
|
|
||||||
InstrKind::Not => asm_instrs.push(Asm::Op(OpCode::Not, vec![])),
|
|
||||||
InstrKind::BitAnd => asm_instrs.push(Asm::Op(OpCode::BitAnd, vec![])),
|
|
||||||
InstrKind::BitOr => asm_instrs.push(Asm::Op(OpCode::BitOr, vec![])),
|
|
||||||
InstrKind::BitXor => asm_instrs.push(Asm::Op(OpCode::BitXor, vec![])),
|
|
||||||
InstrKind::Shl => asm_instrs.push(Asm::Op(OpCode::Shl, vec![])),
|
|
||||||
InstrKind::Shr => asm_instrs.push(Asm::Op(OpCode::Shr, vec![])),
|
|
||||||
InstrKind::LocalLoad { slot } => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::GetLocal, vec![Operand::U32(*slot)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::LocalStore { slot } => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::SetLocal, vec![Operand::U32(*slot)]));
|
|
||||||
stack_height = (stack_height - 1).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::GetGlobal(slot) => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::GetGlobal, vec![Operand::U32(*slot)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::SetGlobal(slot) => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::SetGlobal, vec![Operand::U32(*slot)]));
|
|
||||||
stack_height = (stack_height - 1).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::Jmp(label) => {
|
|
||||||
let target = if label.0 == "end" { end_label.clone() } else { label.0.clone() };
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Jmp, vec![Operand::RelLabel(target, function.name.clone())]));
|
|
||||||
}
|
|
||||||
InstrKind::JmpIfFalse(label) => {
|
|
||||||
let target = if label.0 == "end" { end_label.clone() } else { label.0.clone() };
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::JmpIfFalse, vec![Operand::RelLabel(target, function.name.clone())]));
|
|
||||||
// VM consumes the condition for JmpIfFalse
|
|
||||||
stack_height = (stack_height - 1).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::Label(label) => {
|
|
||||||
asm_instrs.push(Asm::Label(label.0.clone()));
|
|
||||||
// Each labeled block in VM code is a fresh basic block.
|
|
||||||
// Our IR lowering (core_to_vm) assumes empty evaluation stack at
|
|
||||||
// block boundaries. Ensure the emitter's internal accounting
|
|
||||||
// matches that assumption to avoid inserting balancing Pops
|
|
||||||
// carried over from previous fallthrough paths.
|
|
||||||
stack_height = 0;
|
|
||||||
}
|
|
||||||
InstrKind::Call { func_id, arg_count } => {
|
|
||||||
// Translate call by function index within this module
|
|
||||||
if let Some(idx) = id_to_index.get(func_id) {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Call, vec![Operand::U32(*idx)]));
|
|
||||||
stack_height = (stack_height - (*arg_count as i32)).max(0);
|
|
||||||
} else {
|
|
||||||
// As a fallback, if we can resolve by name (cross-module import label)
|
|
||||||
let name = func_names.get(func_id).ok_or_else(|| anyhow!("Undefined function ID: {:?}", func_id))?;
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Call, vec![Operand::Label(name.clone())]));
|
|
||||||
stack_height = (stack_height - (*arg_count as i32)).max(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::ImportCall { dep_alias, module_path, owner, base_name, sig, arg_count } => {
|
|
||||||
let display_name = if let Some(o) = owner {
|
|
||||||
format!("{}.{}", o, base_name)
|
|
||||||
} else {
|
|
||||||
base_name.clone()
|
|
||||||
};
|
|
||||||
let label = format!("@{}::{}:{}#sig{}", dep_alias, module_path, display_name, sig.0);
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Call, vec![Operand::Label(label)]));
|
|
||||||
stack_height = (stack_height - (*arg_count as i32)).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::Ret => {
|
|
||||||
// Do not emit balancing Pops here. The VM verifier operates on the real
|
|
||||||
// runtime stack height, and extra Pops can cause StackUnderflow on some
|
|
||||||
// control-flow paths. Ret should appear when the stack is already in the
|
|
||||||
// correct state.
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Ret, vec![]));
|
|
||||||
}
|
|
||||||
InstrKind::Syscall(id) => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Syscall, vec![Operand::U32(*id)]));
|
|
||||||
}
|
|
||||||
InstrKind::FrameSync => asm_instrs.push(Asm::Op(OpCode::FrameSync, vec![])),
|
|
||||||
InstrKind::Alloc { type_id, slots } => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::Alloc, vec![Operand::U32(type_id.0), Operand::U32(*slots)]));
|
|
||||||
stack_height += *slots as i32;
|
|
||||||
}
|
|
||||||
InstrKind::GateLoad { offset } => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::GateLoad, vec![Operand::U32(*offset)]));
|
|
||||||
stack_height += 1;
|
|
||||||
}
|
|
||||||
InstrKind::GateStore { offset } => {
|
|
||||||
asm_instrs.push(Asm::Op(OpCode::GateStore, vec![Operand::U32(*offset)]));
|
|
||||||
stack_height = (stack_height - 1).max(0);
|
|
||||||
}
|
|
||||||
InstrKind::GateBeginPeek => asm_instrs.push(Asm::Op(OpCode::GateBeginPeek, vec![])),
|
|
||||||
InstrKind::GateEndPeek => asm_instrs.push(Asm::Op(OpCode::GateEndPeek, vec![])),
|
|
||||||
InstrKind::GateBeginBorrow => asm_instrs.push(Asm::Op(OpCode::GateBeginBorrow, vec![])),
|
|
||||||
InstrKind::GateEndBorrow => asm_instrs.push(Asm::Op(OpCode::GateEndBorrow, vec![])),
|
|
||||||
InstrKind::GateBeginMutate => asm_instrs.push(Asm::Op(OpCode::GateBeginMutate, vec![])),
|
|
||||||
InstrKind::GateEndMutate => asm_instrs.push(Asm::Op(OpCode::GateEndMutate, vec![])),
|
|
||||||
InstrKind::GateRetain => asm_instrs.push(Asm::Op(OpCode::GateRetain, vec![])),
|
|
||||||
InstrKind::GateRelease => asm_instrs.push(Asm::Op(OpCode::GateRelease, vec![])),
|
|
||||||
}
|
|
||||||
|
|
||||||
let op_end_idx = asm_instrs.len();
|
|
||||||
// If we just pushed an Op, record its index as last_op_idx_in_func
|
|
||||||
if op_end_idx > 0 {
|
|
||||||
if let Asm::Op(_, _) = &asm_instrs[op_end_idx - 1] {
|
|
||||||
last_op_idx_in_func = Some(op_end_idx - 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _ in op_start_idx..op_end_idx {
|
|
||||||
ir_instr_map.push(Some(instr));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Emite label canônico de término no fim real do corpo
|
|
||||||
asm_instrs.push(Asm::Label(end_label));
|
|
||||||
ir_instr_map.push(None);
|
|
||||||
// Determine last op index; if function had no ops, fallback to the padding NOP we just injected
|
|
||||||
let last_op_idx = last_op_idx_in_func.unwrap_or(start_idx);
|
|
||||||
ranges.push((start_idx, last_op_idx));
|
|
||||||
}
|
|
||||||
Ok(ranges)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn calculate_pcs(asm_instrs: &[Asm]) -> Vec<u32> {
|
|
||||||
let mut pcs = Vec::with_capacity(asm_instrs.len());
|
|
||||||
let mut current_pc = 0u32;
|
|
||||||
for instr in asm_instrs {
|
|
||||||
pcs.push(current_pc);
|
|
||||||
match instr {
|
|
||||||
Asm::Label(_) => {}
|
|
||||||
Asm::Op(_opcode, operands) => {
|
|
||||||
current_pc += 2;
|
|
||||||
current_pc = update_pc_by_operand(current_pc, operands);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pcs
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir_core::const_pool::ConstantValue;
    use crate::ir_core::ids::FunctionId;
    use crate::ir_lang::instr::{InstrKind, Instruction};
    use crate::ir_lang::module::{Function, Module};
    use crate::ir_lang::types::Type;
    use prometeu_bytecode::{BytecodeLoader, ConstantPoolEntry};
    use prometeu_bytecode::disasm::disasm;

    /// The emitted PBC image must round-trip the module's constant pool,
    /// with the implicit `Null` entry occupying index 0 and user constants
    /// following in insertion order.
    #[test]
    fn test_emit_module_with_const_pool() {
        let mut module = Module::new("test".to_string());

        let id_int = module.const_pool.insert(ConstantValue::Int(12345));
        let id_str = module.const_pool.insert(ConstantValue::String("hello".to_string()));

        let function = Function {
            id: FunctionId(0),
            name: "main".to_string(),
            sig: crate::ir_core::SigId(0),
            params: vec![],
            return_type: Type::Void,
            body: vec![
                Instruction::new(InstrKind::PushConst(ir_lang::ConstId(id_int.0)), None),
                Instruction::new(InstrKind::PushConst(ir_lang::ConstId(id_str.0)), None),
                Instruction::new(InstrKind::Ret, None),
            ],
            param_slots: 0,
            local_slots: 0,
            return_slots: 0,
        };

        module.functions.push(function);

        let result = emit_module(&module).expect("Failed to emit module");

        let pbc = BytecodeLoader::load(&result.rom).expect("Failed to parse emitted PBC");

        // Index 0 is the implicit Null entry; user constants follow.
        assert_eq!(pbc.const_pool.len(), 3);
        assert_eq!(pbc.const_pool[0], ConstantPoolEntry::Null);
        assert_eq!(pbc.const_pool[1], ConstantPoolEntry::Int64(12345));
        assert_eq!(pbc.const_pool[2], ConstantPoolEntry::String("hello".to_string()));
    }

    // NOTE(review): a long-disabled, fully commented-out test asserting that
    // the emitter resets its internal stack accounting at label boundaries
    // used to live here; it was removed as dead code. The observable
    // guarantee — no balancing `Pop` inserted at control-flow boundaries —
    // is exercised by `test_jmp_if_false_does_not_emit_pop` below.

    /// The emitter must NOT insert an explicit `Pop` around `JmpIfFalse`;
    /// the VM consumes the condition value implicitly.
    #[test]
    fn test_jmp_if_false_does_not_emit_pop() {
        let mut module = Module::new("test_jif".to_string());

        // Prepare constants for a simple comparison (2 > 1).
        let cid_two = module.const_pool.insert(ConstantValue::Int(2));
        let cid_one = module.const_pool.insert(ConstantValue::Int(1));

        let func = Function {
            id: FunctionId(1),
            name: "main".to_string(),
            sig: crate::ir_core::SigId(0),
            params: vec![],
            return_type: Type::Void,
            body: vec![
                // cond: 2 > 1
                Instruction::new(InstrKind::PushConst(ir_lang::ConstId(cid_two.0)), None),
                Instruction::new(InstrKind::PushConst(ir_lang::ConstId(cid_one.0)), None),
                Instruction::new(InstrKind::Gt, None),
                // if !cond -> else
                Instruction::new(InstrKind::JmpIfFalse(ir_lang::Label("else".to_string())), None),
                // then: jump to merge
                Instruction::new(InstrKind::Jmp(ir_lang::Label("then".to_string())), None),
                // else block
                Instruction::new(InstrKind::Label(ir_lang::Label("else".to_string())), None),
                Instruction::new(InstrKind::Ret, None),
                // then block
                Instruction::new(InstrKind::Label(ir_lang::Label("then".to_string())), None),
                Instruction::new(InstrKind::Ret, None),
            ],
            param_slots: 0,
            local_slots: 0,
            return_slots: 0,
        };

        module.functions.push(func);

        let result = emit_module(&module).expect("Failed to emit module");
        let pbc = BytecodeLoader::load(&result.rom).expect("Failed to parse emitted PBC");
        let instrs = disasm(&pbc.code).expect("Failed to disassemble emitted bytecode");

        // Find JmpIfFalse in the listing and assert the very next opcode is NOT Pop.
        let mut found_jif = false;
        for i in 0..instrs.len().saturating_sub(1) {
            if format!("{:?}", instrs[i].opcode) == "JmpIfFalse" {
                found_jif = true;
                let next_op = format!("{:?}", instrs[i + 1].opcode);
                assert_ne!(next_op.as_str(), "Pop", "Emitter must not insert Pop after JmpIfFalse");
                break;
            }
        }
        assert!(found_jif, "Expected JmpIfFalse in emitted code but none was found");
    }
}
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
//! Bytecode emission front door: lowers VM IR modules into PBC artifacts.

// Emitter that assembles VM IR into PBC bytecode (see `emit_module` /
// `emit_fragments`).
pub mod emit_bytecode;
// Build output container types — presumably the on-disk/in-memory artifact
// description; confirm against `artifacts.rs`.
pub mod artifacts;

// Re-export the commonly used entry points at this module's root so callers
// don't need to name the submodules.
pub use artifacts::Artifacts;
pub use emit_bytecode::EmitResult;
pub use emit_bytecode::{emit_fragments, emit_module, EmitFragments};
|
|
||||||
@ -1,725 +0,0 @@
|
|||||||
use crate::building::output::CompiledModule;
|
|
||||||
use crate::building::plan::BuildStep;
|
|
||||||
use prometeu_abi::{ProgramImage, Value};
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use prometeu_bytecode::decoder::decode_next;
|
|
||||||
use prometeu_bytecode::layout;
|
|
||||||
use prometeu_bytecode::opcode::OpCode;
|
|
||||||
use prometeu_bytecode::{ConstantPoolEntry, DebugInfo};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// Errors produced while linking compiled modules into a `ProgramImage`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LinkError {
    /// A byte patch fell outside the combined code buffer: `(position, buffer_len)`.
    OutOfBounds(usize, usize),
    /// An imported symbol could not be resolved in any candidate project.
    UnresolvedSymbol(String),
    /// Two modules exported the same (project, module path, item) key.
    DuplicateExport(String),
    /// Structural mismatch. NOTE(review): despite the name, `Linker::link`
    /// also uses this variant for module/step count mismatches, bytecode
    /// decode errors, and bad immediates — confirm whether a dedicated
    /// variant is wanted.
    IncompatibleSymbolSignature(String),
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for LinkError {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
LinkError::OutOfBounds(pos, len) => write!(f, "Out of bounds: pos={} len={}", pos, len),
|
|
||||||
LinkError::UnresolvedSymbol(s) => write!(f, "Unresolved symbol: {}", s),
|
|
||||||
LinkError::DuplicateExport(s) => write!(f, "Duplicate export: {}", s),
|
|
||||||
LinkError::IncompatibleSymbolSignature(s) => write!(f, "Incompatible symbol signature: {}", s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::error::Error for LinkError {}
|
|
||||||
|
|
||||||
/// Stateless linker; the single entry point is `Linker::link`.
pub struct Linker;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
|
||||||
struct ConstantPoolBitKey(Vec<u8>);
|
|
||||||
|
|
||||||
impl ConstantPoolBitKey {
|
|
||||||
fn from_entry(entry: &ConstantPoolEntry) -> Self {
|
|
||||||
match entry {
|
|
||||||
ConstantPoolEntry::Null => Self(vec![0]),
|
|
||||||
ConstantPoolEntry::Int64(v) => {
|
|
||||||
let mut b = vec![1];
|
|
||||||
b.extend_from_slice(&v.to_le_bytes());
|
|
||||||
Self(b)
|
|
||||||
}
|
|
||||||
ConstantPoolEntry::Float64(v) => {
|
|
||||||
let mut b = vec![2];
|
|
||||||
b.extend_from_slice(&v.to_bits().to_le_bytes());
|
|
||||||
Self(b)
|
|
||||||
}
|
|
||||||
ConstantPoolEntry::Boolean(v) => {
|
|
||||||
Self(vec![3, if *v { 1 } else { 0 }])
|
|
||||||
}
|
|
||||||
ConstantPoolEntry::String(v) => {
|
|
||||||
let mut b = vec![4];
|
|
||||||
b.extend_from_slice(v.as_bytes());
|
|
||||||
Self(b)
|
|
||||||
}
|
|
||||||
ConstantPoolEntry::Int32(v) => {
|
|
||||||
let mut b = vec![5];
|
|
||||||
b.extend_from_slice(&v.to_le_bytes());
|
|
||||||
Self(b)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Linker {

    /// Links independently compiled modules into a single flat `ProgramImage`.
    ///
    /// `modules` and `steps` are parallel vectors in build order: `steps[i]`
    /// supplies the dependency-alias map used to resolve `modules[i]`'s
    /// imports. The last module is treated as the root project when building
    /// the final export table.
    ///
    /// Work happens in two passes over the modules:
    /// * PASS 1 concatenates code and function tables, records per-module
    ///   base offsets, merges debug info, and indexes every export under a
    ///   canonical key plus a transitional short-name key.
    /// * PASS 2 deduplicates constants, patches import CALL sites via the
    ///   recorded relocation PCs, and rewrites PushConst/Call immediates
    ///   from module-local to global indices.
    ///
    /// # Errors
    /// Returns a `LinkError` on module/step count mismatch, duplicate
    /// exports, unresolvable imports, or undecodable bytecode.
    pub fn link(modules: Vec<CompiledModule>, steps: Vec<BuildStep>) -> Result<ProgramImage, LinkError> {
        if modules.len() != steps.len() {
            return Err(LinkError::IncompatibleSymbolSignature(format!("Module count ({}) does not match build steps count ({})", modules.len(), steps.len())));
        }

        // Flattened outputs accumulated across both passes.
        let mut combined_code = Vec::new();
        let mut combined_functions = Vec::new();
        let mut combined_constants = Vec::new();
        // Dedup map: byte-identity key of a constant -> its global pool index.
        let mut constant_map: HashMap<ConstantPoolBitKey, u32> = HashMap::new();

        // Debug info merging
        let mut combined_pc_to_span = Vec::new();
        let mut combined_function_names = Vec::new();

        // 1. DebugSymbol resolution maps:
        //    - canonical: (ProjectKey, module_path, ExportItem) -> func_idx
        //    - compatibility (Phase 03 transitional): (ProjectKey, module_path, short_name) -> func_idx
        let mut global_symbols = HashMap::new();
        let mut global_symbols_str = HashMap::new();

        // Per-module base offsets into the combined code / function tables.
        let mut module_code_offsets = Vec::with_capacity(modules.len());
        let mut module_function_offsets = Vec::with_capacity(modules.len());

        // Map ProjectKey to index (currently unused; kept for future lookups).
        let _project_to_idx: HashMap<_, _> = modules.iter().enumerate().map(|(i, m)| (m.project_id.clone(), i)).collect();

        // PASS 1: Collect exports and calculate offsets
        for (_i, module) in modules.iter().enumerate() {
            let code_offset = combined_code.len() as u32;
            let function_offset = combined_functions.len() as u32;

            module_code_offsets.push(code_offset);
            module_function_offsets.push(function_offset);

            for (key, meta) in &module.exports {
                // Only exports backed by a real function body participate.
                if let Some(local_func_idx) = meta.func_idx {
                    let global_func_idx = function_offset + local_func_idx;
                    // Note: Use a tuple as key for clarity
                    let symbol_id = (module.project_id.clone(), key.module_path.clone(), key.item.clone());

                    if global_symbols.contains_key(&symbol_id) {
                        return Err(LinkError::DuplicateExport(format!(
                            "Project {:?} export {}:{:?} already defined",
                            symbol_id.0, symbol_id.1, symbol_id.2
                        )));
                    }
                    // Canonical mapping
                    global_symbols.insert(symbol_id, global_func_idx);

                    // Compatibility string mapping (short name only)
                    let short_name = match &key.item {
                        language_api::types::ExportItem::Function { fn_key } => fn_key.name.as_str().to_string(),
                        language_api::types::ExportItem::Service { name } => name.as_str().to_string(),
                        language_api::types::ExportItem::Type { name } => name.as_str().to_string(),
                    };
                    let symbol_id_str = (module.project_id.clone(), key.module_path.clone(), short_name);
                    global_symbols_str.insert(symbol_id_str, global_func_idx);
                }
            }

            combined_code.extend_from_slice(&module.code);
            // Function metas keep their shape; only code_offset becomes global.
            for func in &module.function_metas {
                let mut relocated = func.clone();
                relocated.code_offset += code_offset;
                combined_functions.push(relocated);
            }

            // Shift this module's debug spans/names by its base offsets.
            if let Some(debug) = &module.debug_info {
                for (pc, span) in &debug.pc_to_span {
                    combined_pc_to_span.push((code_offset + pc, span.clone()));
                }
                for (func_idx, name) in &debug.function_names {
                    combined_function_names.push((function_offset + func_idx, name.clone()));
                }
            }
        }

        // PASS 2: Relocate constants and patch CALLs
        for (i, module) in modules.iter().enumerate() {
            let step = &steps[i];
            let code_offset = module_code_offsets[i] as usize;

            // Map local constant indices to global constant indices
            let mut local_to_global_const = Vec::with_capacity(module.const_pool.len());
            for entry in &module.const_pool {
                let bit_key = ConstantPoolBitKey::from_entry(entry);
                if let Some(&global_idx) = constant_map.get(&bit_key) {
                    // Already interned by an earlier module — reuse its slot.
                    local_to_global_const.push(global_idx);
                } else {
                    let global_idx = combined_constants.len() as u32;
                    combined_constants.push(match entry {
                        ConstantPoolEntry::Null => Value::Null,
                        ConstantPoolEntry::Int64(v) => Value::Int64(*v),
                        ConstantPoolEntry::Float64(v) => Value::Float(*v),
                        ConstantPoolEntry::Boolean(v) => Value::Boolean(*v),
                        ConstantPoolEntry::String(v) => Value::String(v.clone()),
                        ConstantPoolEntry::Int32(v) => Value::Int32(*v),
                    });
                    constant_map.insert(bit_key, global_idx);
                    local_to_global_const.push(global_idx);
                }
            }

            // Patch imports
            for import in &module.imports {
                // Resolve the dependency project id. If alias is missing/self, try all deps as fallback.
                let mut candidate_projects: Vec<&ProjectId> = Vec::new();
                if import.key.dep_alias == "self" || import.key.dep_alias.is_empty() {
                    candidate_projects.push(&module.project_id);
                    for (_alias, pid) in &step.deps { candidate_projects.push(pid); }
                } else {
                    let pid = step.deps.get(&import.key.dep_alias)
                        .ok_or_else(|| LinkError::UnresolvedSymbol(format!("Dependency alias '{}' not found in project {:?}", import.key.dep_alias, module.project_id)))?;
                    candidate_projects.push(pid);
                }

                // First candidate exporting (module_path, symbol_name) wins.
                let mut resolved_idx: Option<u32> = None;
                for pid in candidate_projects {
                    let pid_val: ProjectId = (*pid).clone();
                    let key = (pid_val, import.key.module_path.clone(), import.key.symbol_name.clone());
                    if let Some(&idx) = global_symbols_str.get(&key) {
                        resolved_idx = Some(idx);
                        break;
                    }
                }
                let target_func_idx = resolved_idx.ok_or_else(|| {
                    LinkError::UnresolvedSymbol(format!(
                        "DebugSymbol '{}:{}' not found in any candidate project (self={:?}, deps={:?})",
                        import.key.module_path,
                        import.key.symbol_name,
                        module.project_id,
                        step.deps
                    ))
                })?;

                for &reloc_pc in &import.relocation_pcs {
                    // `reloc_pc` points at the START of the operand (after the
                    // 2 opcode bytes), matching how `assemble_with_unresolved`
                    // records `pc` before writing the U32. So we must write
                    // exactly at `absolute_pc`.
                    let absolute_pc = code_offset + reloc_pc as usize;
                    // Deliberate best-effort: out-of-range relocations are skipped.
                    if absolute_pc + 4 <= combined_code.len() {
                        combined_code[absolute_pc..absolute_pc+4]
                            .copy_from_slice(&target_func_idx.to_le_bytes());
                    }
                }
            }

            // Walk this module's slice of the combined code, rewriting
            // PushConst/Call immediates from local to global indices.
            let mut pc = code_offset;
            let end = code_offset + module.code.len();
            while pc < end {
                // Scope the immutable borrow from decode_next so we can mutate combined_code afterwards
                let (opcode, next_pc, imm_start, imm_u32_opt) = {
                    match decode_next(pc, &combined_code) {
                        Ok(instr) => {
                            let opcode = instr.opcode;
                            let next_pc = instr.next_pc;
                            let imm_start = instr.pc + 2; // start of immediate payload
                            let imm_u32_opt = match opcode {
                                OpCode::PushConst | OpCode::Call => {
                                    match instr.imm_u32() {
                                        Ok(v) => Some(v),
                                        Err(_) => None,
                                    }
                                }
                                _ => None,
                            };
                            (opcode, next_pc, imm_start, imm_u32_opt)
                        }
                        Err(e) => {
                            return Err(LinkError::IncompatibleSymbolSignature(format!(
                                "Bytecode decode error at pc {}: {:?}",
                                pc - code_offset, e
                            )));
                        }
                    }
                };

                match opcode {
                    OpCode::PushConst => {
                        let local_idx = imm_u32_opt.ok_or_else(|| LinkError::IncompatibleSymbolSignature(format!(
                            "Invalid PUSH_CONST immediate at pc {}",
                            pc - code_offset
                        )))? as usize;
                        if let Some(&global_idx) = local_to_global_const.get(local_idx) {
                            // NOTE(review): patch_u32_at returns a Result that is
                            // discarded here (and in the Call arm below), so an
                            // out-of-bounds patch is silently skipped — confirm
                            // this best-effort behavior is intended.
                            patch_u32_at(&mut combined_code, imm_start, &|_| global_idx);
                        }
                    }
                    OpCode::Call => {
                        let local_func_idx = imm_u32_opt.ok_or_else(|| LinkError::IncompatibleSymbolSignature(format!(
                            "Invalid CALL immediate at pc {}",
                            pc - code_offset
                        )))?;
                        // Determine if this CALL site corresponds to an import relocation.
                        let reloc_rel_pc = (imm_start - code_offset) as u32;
                        let is_import = module
                            .imports
                            .iter()
                            .any(|imp| imp.relocation_pcs.contains(&reloc_rel_pc));
                        // Import CALLs were already patched above; only
                        // intra-module CALLs get the function-offset shift.
                        if !is_import {
                            let global_func_idx = module_function_offsets[i] + local_func_idx;
                            patch_u32_at(&mut combined_code, imm_start, &|_| global_func_idx);
                        }
                    }
                    // Branches are strictly function-relative. Do NOT relocate or inspect immediates.
                    // The emitter encodes `target_rel = label - func_start` and the verifier enforces it.
                    OpCode::Jmp | OpCode::JmpIfFalse | OpCode::JmpIfTrue => { /* no-op */ }
                    _ => {}
                }

                pc = next_pc;
            }
        }

        // Final Exports map for ProgramImage (String -> func_idx)
        // Only including exports from the ROOT project (the last one in build plan usually)
        // In PBS v0, exports are name -> func_id.
        let mut final_exports = HashMap::new();
        if let Some(root_module) = modules.last() {
            for (key, meta) in &root_module.exports {
                if let Some(local_func_idx) = meta.func_idx {
                    let global_func_idx = module_function_offsets.last().unwrap() + local_func_idx;
                    final_exports.insert(format!("{}:{:?}", key.module_path, key.item), global_func_idx);
                    // Also provide short name for root module exports to facilitate entrypoint resolution.
                    // For canonical items, we fall back to the `Debug` representation without the module path.
                    let short = format!("{:?}", key.item);
                    if !final_exports.contains_key(&short) {
                        final_exports.insert(short, global_func_idx);
                    }
                }
            }
        }

        // v0: Fallback export for entrypoint `frame` (root module)
        if !final_exports.iter().any(|(name, _)| name.ends_with(":frame") || name == "frame") {
            if let Some(&root_offset) = module_function_offsets.last() {
                if let Some((idx, _)) = combined_function_names.iter().find(|(i, name)| *i >= root_offset && name == "frame") {
                    final_exports.insert("frame".to_string(), *idx);
                    final_exports.insert("src/main/modules:frame".to_string(), *idx);
                }
            }
        }

        // Final adjustment: when DebugInfo function names are enriched in the
        // "name@offset+len" form, align only the `code_len` of
        // `combined_functions[idx]` to those values (DebugInfo offsets are
        // module-local, pre-link). The `code_offset` already relocated during
        // PASS 1 is kept as-is.
        // Track which function metas received a precise code_len from DebugInfo
        let mut has_precise_len: Vec<bool> = vec![false; combined_functions.len()];

        for (idx, name) in &combined_function_names {
            if let Some((base, rest)) = name.split_once('@') {
                let mut parts = rest.split('+');
                if let (Some(off_str), Some(len_str)) = (parts.next(), parts.next()) {
                    if let (Ok(_off), Ok(len)) = (off_str.parse::<u32>(), len_str.parse::<u32>()) {
                        if let Some(meta) = combined_functions.get_mut(*idx as usize) {
                            let old_off = meta.code_offset;
                            let old_len = meta.code_len;
                            meta.code_len = len;
                            has_precise_len[*idx as usize] = true;
                            // NOTE(review): unconditional stderr logging per
                            // aligned function — consider gating behind a flag.
                            eprintln!(
                                "[Linker][debug] Align len idx={} name={} -> code_offset {} (kept) | code_len {} -> {}",
                                idx, base, old_off, old_len, len
                            );
                        }
                    }
                }
            }
        }

        // Ensure DebugInfo also contains plain base names alongside enriched names for easy lookup.
        // For any entry of form "name@off+len", also add (idx, "name") if missing.
        let mut plain_names_to_add: Vec<(u32, String)> = Vec::new();
        for (idx, name) in &combined_function_names {
            if let Some((base, _)) = name.split_once('@') {
                let already_has_plain = combined_function_names.iter().any(|(i, n)| i == idx && n == base);
                if !already_has_plain {
                    plain_names_to_add.push((*idx, base.to_string()));
                }
            }
        }
        combined_function_names.extend(plain_names_to_add);

        // Recompute code_len ONLY for functions that did NOT receive a precise length from DebugInfo.
        // This preserves exact ends emitted by the compiler while still filling lengths for functions
        // that lack enriched annotations.
        let total_len = combined_code.len();
        let layouts = layout::compute_function_layouts(&combined_functions, total_len);
        for i in 0..combined_functions.len() {
            if !has_precise_len.get(i).copied().unwrap_or(false) {
                let start = layouts[i].start;
                let end = layouts[i].end;
                combined_functions[i].code_len = end.saturating_sub(start) as u32;
            }
        }

        // `frame`-specific padding was removed; the emitter now guarantees the
        // end label sits exactly at the end of the body and, when needed,
        // inserts real NOPs before the end.

        // Guarantee the 'frame' entry-point export even when DebugInfo names
        // are enriched.
        if !final_exports.contains_key("frame") {
            if let Some((idx, _name)) = combined_function_names.iter().find(|(i, name)| {
                let base = name.split('@').next().unwrap_or(name.as_str());
                let i_usize = *i as usize;
                // Entry point must take no params and return nothing.
                (base == "frame" || base.ends_with(":frame"))
                    && combined_functions.get(i_usize).map(|m| m.param_slots == 0 && m.return_slots == 0).unwrap_or(false)
            }) {
                final_exports.insert("frame".to_string(), *idx);
                final_exports.insert("src/main/modules:frame".to_string(), *idx);
            }
        }

        let combined_debug_info = if combined_pc_to_span.is_empty() && combined_function_names.is_empty() {
            None
        } else {
            // Ensure entry-point name mapping is present for easy lookup in DebugInfo
            if let Some(frame_idx) = final_exports.get("frame") {
                if !combined_function_names.iter().any(|(i, n)| i == frame_idx && n == "frame") {
                    combined_function_names.push((*frame_idx, "frame".to_string()));
                }
            }
            Some(DebugInfo {
                pc_to_span: combined_pc_to_span,
                function_names: combined_function_names,
            })
        };

        Ok(ProgramImage::new(
            combined_code,
            combined_constants,
            combined_functions,
            combined_debug_info,
            final_exports,
        ))
    }
}
|
|
||||||
|
|
||||||
/// Reads the little-endian u32 at `pos` in `buf`, maps it through `f`, and
/// writes the result back in place.
///
/// # Errors
/// Returns `LinkError::OutOfBounds` when `pos..pos + 4` does not fit in `buf`.
fn patch_u32_at(
    buf: &mut [u8],
    pos: usize,
    f: impl FnOnce(u32) -> u32,
) -> Result<(), LinkError> {
    let buf_len = buf.len();
    let old = prometeu_bytecode::io::read_u32_le(buf, pos)
        .ok_or(LinkError::OutOfBounds(pos, buf_len))?;
    let new = f(old);
    prometeu_bytecode::io::write_u32_le(buf, pos, new)
        .ok_or(LinkError::OutOfBounds(pos, buf_len))?;
    Ok(())
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use crate::building::output::{ExportKey, ExportMetadata, ImportKey, ImportMetadata};
|
|
||||||
use crate::building::plan::BuildTarget;
|
|
||||||
use crate::deps::resolver::ProjectKey;
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use prometeu_bytecode::opcode::OpCode;
|
|
||||||
use prometeu_bytecode::FunctionMeta;
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
|
|
||||||
/// End-to-end link of a library module exporting `Add` and a root module that
/// imports it: verifies both functions survive linking and that the CALL
/// immediate in the root code is patched to the library's function index.
#[test]
fn test_link_root_and_lib() {
    let lib_key = ProjectKey { name: "lib".into(), version: "1.0.0".into() };
    let root_key = ProjectKey { name: "root".into(), version: "1.0.0".into() };
    let lib_id = ProjectId(0);
    let root_id = ProjectId(1);

    // Lib module: exports 'add'
    // Opcodes are encoded as little-endian u16, so this body is 4 bytes total.
    let mut lib_code = Vec::new();
    lib_code.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    lib_code.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());

    let mut lib_exports = BTreeMap::new();
    use language_api::types::{CanonicalFnKey, ExportItem, ItemName, SignatureRef};
    // NOTE: ItemName validation may enforce capitalized identifiers; for test purposes use a canonical valid name.
    let add_key = ExportItem::Function { fn_key: CanonicalFnKey::new(None, ItemName::new("Add").unwrap(), SignatureRef(0)) };
    lib_exports.insert(ExportKey { module_path: "math".into(), item: add_key }, ExportMetadata { func_idx: Some(0), is_host: false, ty: None });

    let lib_module = CompiledModule {
        project_id: lib_id,
        project_key: lib_key.clone(),
        target: BuildTarget::Main,
        exports: lib_exports,
        imports: vec![],
        const_pool: vec![],
        code: lib_code,
        function_metas: vec![FunctionMeta {
            code_offset: 0,
            code_len: 4,
            ..Default::default()
        }],
        debug_info: None,
        symbols: vec![],
    };

    // Root module: calls 'lib::math:add'
    // Layout: PushI32(2+4) PushI32(2+4) Call(2+4) Halt(2) = 20 bytes.
    let mut root_code = Vec::new();
    root_code.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    root_code.extend_from_slice(&10i32.to_le_bytes());
    root_code.extend_from_slice(&(OpCode::PushI32 as u16).to_le_bytes());
    root_code.extend_from_slice(&20i32.to_le_bytes());
    // Call lib:math:add
    let call_pc = root_code.len() as u32;
    root_code.extend_from_slice(&(OpCode::Call as u16).to_le_bytes());
    root_code.extend_from_slice(&0u32.to_le_bytes()); // placeholder
    root_code.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    // The import record tells the linker which PCs need the func-index patch.
    let root_imports = vec![ImportMetadata {
        key: ImportKey {
            dep_alias: "mylib".into(),
            module_path: "math".into(),
            symbol_name: "Add".into(),
        },
        relocation_pcs: vec![call_pc],
    }];

    let root_module = CompiledModule {
        project_id: root_id,
        project_key: root_key.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: root_imports,
        const_pool: vec![],
        code: root_code,
        function_metas: vec![FunctionMeta {
            code_offset: 0,
            code_len: 20,
            ..Default::default()
        }],
        debug_info: None,
        symbols: vec![],
    };

    let lib_step = BuildStep {
        project_id: lib_id,
        project_key: lib_key.clone(),
        project_dir: "".into(),
        target: BuildTarget::Main,
        sources: vec![],
        deps: BTreeMap::new(),
    };

    // Map the "mylib" alias used by the import key to the library project id.
    let mut root_deps: BTreeMap<String, ProjectId> = BTreeMap::new();
    root_deps.insert("mylib".into(), lib_id);

    let root_step = BuildStep {
        project_id: root_id,
        project_key: root_key.clone(),
        project_dir: "".into(),
        target: BuildTarget::Main,
        sources: vec![],
        deps: root_deps,
    };

    let result = Linker::link(vec![lib_module, root_module], vec![lib_step, root_step]).unwrap();

    assert_eq!(result.functions.len(), 2);
    // lib:add is func 0
    // root:main is func 1

    // lib_code length is 4.
    // Root code starts at 4.
    // CALL was at root_code offset 12.
    // Absolute PC of CALL: 4 + 12 = 16.
    // Immediate is at 16 + 2 = 18.
    let patched_func_idx = u32::from_le_bytes(result.rom[18..22].try_into().unwrap());
    assert_eq!(patched_func_idx, 0); // Points to lib:add
}
|
|
||||||
|
|
||||||
/// Linking two modules with overlapping constant pools must deduplicate the
/// shared entry ("hello") while preserving first-seen insertion order.
#[test]
fn test_link_const_deduplication() {
    let key = ProjectKey { name: "test".into(), version: "1.0.0".into() };
    let id = ProjectId(0);
    let step = BuildStep { project_id: id, project_key: key.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };

    // First module contributes Int32(42) and String("hello").
    let m1 = CompiledModule {
        project_id: id,
        project_key: key.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![ConstantPoolEntry::Int32(42), ConstantPoolEntry::String("hello".into())],
        code: vec![],
        function_metas: vec![],
        debug_info: None,
        symbols: vec![],
    };

    // Second module repeats String("hello") (must be merged) and adds Int32(99).
    let m2 = CompiledModule {
        project_id: id,
        project_key: key.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![ConstantPoolEntry::String("hello".into()), ConstantPoolEntry::Int32(99)],
        code: vec![],
        function_metas: vec![],
        debug_info: None,
        symbols: vec![],
    };

    let result = Linker::link(vec![m1, m2], vec![step.clone(), step]).unwrap();

    // Constants should be: 42, "hello", 99
    assert_eq!(result.constant_pool.len(), 3);
    assert_eq!(result.constant_pool[0], Value::Int32(42));
    assert_eq!(result.constant_pool[1], Value::String("hello".into()));
    assert_eq!(result.constant_pool[2], Value::Int32(99));
}
|
|
||||||
|
|
||||||
/// Intra-function jump immediates are function-relative and must NOT be
/// rebased when a module is placed at a non-zero offset in the linked ROM.
#[test]
fn test_jump_relocation_across_modules() {
    // Module 1: small stub to create a non-zero code offset for module 2
    let key1 = ProjectKey { name: "m1".into(), version: "1.0.0".into() };
    let id1 = ProjectId(0);
    let step1 = BuildStep { project_id: id1, project_key: key1.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };

    let mut code1 = Vec::new();
    code1.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    code1.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let m1 = CompiledModule {
        project_id: id1,
        project_key: key1.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![],
        code: code1.clone(),
        function_metas: vec![FunctionMeta { code_offset: 0, code_len: code1.len() as u32, ..Default::default() }],
        debug_info: None,
        symbols: vec![],
    };

    // Module 2: contains an unconditional JMP and a conditional JMP_IF_TRUE with local targets
    let key2 = ProjectKey { name: "m2".into(), version: "1.0.0".into() };
    let id2 = ProjectId(1);
    let step2 = BuildStep { project_id: id2, project_key: key2.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };

    let mut code2 = Vec::new();
    // Unconditional JMP to local target 0 (module-local start)
    let jmp_pc = code2.len() as u32; // where opcode will be placed
    code2.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    code2.extend_from_slice(&0u32.to_le_bytes());

    // PushBool true; then conditional jump to local target 0
    code2.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    code2.push(1u8);
    let cjmp_pc = code2.len() as u32;
    code2.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
    code2.extend_from_slice(&0u32.to_le_bytes());

    // End with HALT so VM would stop if executed
    code2.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let m2 = CompiledModule {
        project_id: id2,
        project_key: key2.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![],
        code: code2.clone(),
        function_metas: vec![FunctionMeta { code_offset: 0, code_len: code2.len() as u32, ..Default::default() }],
        debug_info: None,
        symbols: vec![],
    };

    // Link with order [m1, m2]
    let result = Linker::link(vec![m1, m2], vec![step1, step2]).unwrap();

    // Module 2's code starts after module 1's code
    let module2_offset = code1.len() as u32;

    // Verify that the JMP immediate remains function-relative (0), no relocation applied
    let jmp_abs_pc = module2_offset as usize + jmp_pc as usize;
    let jmp_imm_off = jmp_abs_pc + 2; // skip opcode
    let jmp_patched = u32::from_le_bytes(result.rom[jmp_imm_off..jmp_imm_off+4].try_into().unwrap());
    assert_eq!(jmp_patched, 0);

    // Verify that the conditional JMP immediate also remains function-relative (0)
    let cjmp_abs_pc = module2_offset as usize + cjmp_pc as usize;
    let cjmp_imm_off = cjmp_abs_pc + 2;
    let cjmp_patched = u32::from_le_bytes(result.rom[cjmp_imm_off..cjmp_imm_off+4].try_into().unwrap());
    assert_eq!(cjmp_patched, 0);
}
|
|
||||||
|
|
||||||
/// Same setup as `test_jump_relocation_across_modules`, but modules are
/// linked in the opposite order ([m2, m1]); jump immediates must still stay
/// function-relative regardless of placement.
#[test]
fn test_jump_link_order_invariance() {
    // Same setup as previous test, but link order is [m2, m1]
    let key1 = ProjectKey { name: "m1".into(), version: "1.0.0".into() };
    let id1 = ProjectId(0);
    let step1 = BuildStep { project_id: id1, project_key: key1.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };

    let mut code1 = Vec::new();
    code1.extend_from_slice(&(OpCode::Add as u16).to_le_bytes());
    code1.extend_from_slice(&(OpCode::Ret as u16).to_le_bytes());
    let m1 = CompiledModule {
        project_id: id1,
        project_key: key1.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![],
        code: code1.clone(),
        function_metas: vec![FunctionMeta { code_offset: 0, code_len: code1.len() as u32, ..Default::default() }],
        debug_info: None,
        symbols: vec![],
    };

    let key2 = ProjectKey { name: "m2".into(), version: "1.0.0".into() };
    let id2 = ProjectId(1);
    let step2 = BuildStep { project_id: id2, project_key: key2.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };

    let mut code2 = Vec::new();
    let jmp_pc = code2.len() as u32; // where opcode will be placed
    code2.extend_from_slice(&(OpCode::Jmp as u16).to_le_bytes());
    code2.extend_from_slice(&0u32.to_le_bytes());

    code2.extend_from_slice(&(OpCode::PushBool as u16).to_le_bytes());
    code2.push(1u8);
    let cjmp_pc = code2.len() as u32;
    code2.extend_from_slice(&(OpCode::JmpIfTrue as u16).to_le_bytes());
    code2.extend_from_slice(&0u32.to_le_bytes());

    code2.extend_from_slice(&(OpCode::Halt as u16).to_le_bytes());

    let m2 = CompiledModule {
        project_id: id2,
        project_key: key2.clone(),
        target: BuildTarget::Main,
        exports: BTreeMap::new(),
        imports: vec![],
        const_pool: vec![],
        code: code2.clone(),
        function_metas: vec![FunctionMeta { code_offset: 0, code_len: code2.len() as u32, ..Default::default() }],
        debug_info: None,
        symbols: vec![],
    };

    // Link with order [m2, m1]
    let result = Linker::link(vec![m2, m1], vec![step2, step1]).unwrap();

    // Module 2 is now at offset 0
    let module2_offset = 0usize;

    // Verify that the JMP immediate remains function-relative (0), no relocation applied
    let jmp_abs_pc = module2_offset + jmp_pc as usize;
    let jmp_imm_off = jmp_abs_pc + 2; // skip opcode
    let jmp_patched = u32::from_le_bytes(result.rom[jmp_imm_off..jmp_imm_off+4].try_into().unwrap());
    assert_eq!(jmp_patched, 0);

    // Verify that the conditional JMP immediate also remains function-relative (0)
    let cjmp_abs_pc = module2_offset + cjmp_pc as usize;
    let cjmp_imm_off = cjmp_abs_pc + 2;
    let cjmp_patched = u32::from_le_bytes(result.rom[cjmp_imm_off..cjmp_imm_off+4].try_into().unwrap());
    assert_eq!(cjmp_patched, 0);
}
|
|
||||||
}
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
pub mod plan;
|
|
||||||
pub mod output;
|
|
||||||
pub mod linker;
|
|
||||||
pub mod orchestrator;
|
|
||||||
|
|
||||||
// Compile-time boundary guard: Backend modules must not import PBS directly.
// This doctest will fail to compile if someone tries to `use crate::frontends::pbs` from here.
// It is lightweight and runs with `cargo test`.
// The module itself is empty — only the attached `compile_fail` doctest matters.
/// ```compile_fail
/// use crate::frontends::pbs; // Backend must not depend on PBS directly
/// # let _ = &pbs; // ensure the import is actually used so the check is meaningful
/// ```
mod __backend_boundary_guard {}
|
|
||||||
@ -1,302 +0,0 @@
|
|||||||
use crate::building::linker::{LinkError, Linker};
|
|
||||||
use crate::building::output::{compile_project, CompileError};
|
|
||||||
use language_api::traits::Frontend as CanonFrontend;
|
|
||||||
use crate::building::plan::{BuildPlan, BuildTarget};
|
|
||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::common::files::FileManager;
|
|
||||||
use crate::deps::resolver::ResolvedGraph;
|
|
||||||
use prometeu_abi::ProgramImage;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// Top-level failure of the build pipeline, tagged by the phase that failed.
#[derive(Debug)]
pub enum BuildError {
    /// A project failed to compile (frontend, export-conflict, I/O, or internal error).
    Compile(CompileError),
    /// All projects compiled but the final link step failed.
    Link(LinkError),
}
|
|
||||||
|
|
||||||
/// Successful output of `build_from_graph`.
#[derive(Debug, Clone)]
pub struct BuildResult {
    /// The fully linked program image produced by the linker.
    pub image: ProgramImage,
    /// File manager populated while compiling (source registrations, etc.).
    pub file_manager: FileManager,
    /// Per-project symbol tables with file URIs relativized to each project dir.
    pub symbols: Vec<crate::common::symbols::ProjectSymbols>,
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for BuildError {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
BuildError::Compile(e) => write!(f, "Compile error: {}", e),
|
|
||||||
BuildError::Link(e) => write!(f, "Link error: {}", e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Marker impl: lets BuildError be used wherever a `dyn std::error::Error` is expected.
impl std::error::Error for BuildError {}
|
|
||||||
|
|
||||||
impl From<CompileError> for BuildError {
    /// Enables `?` on compile-phase results inside the build pipeline.
    fn from(err: CompileError) -> Self {
        Self::Compile(err)
    }
}
|
|
||||||
|
|
||||||
impl From<LinkError> for BuildError {
    /// Enables `?` on link-phase results inside the build pipeline.
    fn from(err: LinkError) -> Self {
        Self::Link(err)
    }
}
|
|
||||||
|
|
||||||
/// Drives the full build: plans, compiles every project in dependency order,
/// validates the PBS entry point of the root project, links, and relativizes
/// symbol file URIs per project.
///
/// Returns the linked `ProgramImage` plus file-manager state and symbols, or
/// the first compile/link failure encountered.
pub fn build_from_graph(graph: &ResolvedGraph, target: BuildTarget, fe: &dyn CanonFrontend) -> Result<BuildResult, BuildError> {
    let plan = BuildPlan::from_graph(graph, target);
    let mut compiled_modules = HashMap::new();
    let mut modules_in_order = Vec::new();
    let mut file_manager = FileManager::new();

    // Compile each step; earlier (dependency) modules are made available to later ones.
    for step in &plan.steps {
        let compiled = compile_project(step.clone(), &compiled_modules, fe, &mut file_manager)?;
        compiled_modules.insert(step.project_id.clone(), compiled.clone());
        modules_in_order.push(compiled);
    }

    // Validate PBS entry point only for the root project (last step in plan order)
    if let Some(root_step) = plan.steps.last() {
        // 1) Ensure the root project contains file src/main/modules/main.pbs
        let main_pbs_path = root_step.project_dir.join("src/main/modules/main.pbs");
        let has_main_pbs = main_pbs_path.exists();
        if !has_main_pbs {
            return Err(BuildError::Compile(CompileError::Frontend(
                DiagnosticBundle::error(
                    "E_MISSING_ENTRY_POINT_FILE",
                    "Root project must contain src/main/modules/main.pbs".to_string(),
                    crate::common::spans::Span::new(crate::common::spans::FileId::INVALID, 0, 0),
                ),
            )));
        }

        // 2) Ensure that file declares fn frame(): void (no params)
        // We validate at the bytecode metadata level using function names and signature slots.
        if let Some(root_compiled) = compiled_modules.get(&root_step.project_id) {
            // Instrumentation hook: list function names and metas of the root module
            if let Some(di) = &root_compiled.debug_info {
                // Verbose logging suppressed in the final version.
                let _ = di; // no-op
            }

            // Find function index by name "frame" (tolerate qualified names ending with ":frame")
            let mut found_valid = false;
            if let Some(di) = &root_compiled.debug_info {
                for (idx, name) in &di.function_names {
                    // Names in debug_info may be enriched as "name@offset+len". Strip the annotation for comparison.
                    let base_name = name.split('@').next().unwrap_or(name.as_str());
                    let is_frame_name = base_name == "frame" || base_name.ends_with(":frame");
                    if is_frame_name {
                        // Check signature: 0 params, 0 return slots
                        if let Some(meta) = root_compiled.function_metas.get(*idx as usize) {
                            if meta.param_slots == 0 && meta.return_slots == 0 {
                                found_valid = true;
                                break;
                            }
                        }
                    }
                }
            }

            if !found_valid {
                return Err(BuildError::Compile(CompileError::Frontend(
                    DiagnosticBundle::error(
                        "E_MISSING_ENTRY_POINT_FN",
                        "Missing entry point fn frame(): void in src/main/modules/main.pbs".to_string(),
                        crate::common::spans::Span::new(crate::common::spans::FileId::INVALID, 0, 0),
                    ),
                )));
            }
        }
    }

    let program_image = Linker::link(modules_in_order.clone(), plan.steps.clone())?;

    // Collect symbols per project, rewriting absolute file URIs to be
    // relative to the (canonicalized) project directory.
    let mut all_project_symbols = Vec::new();
    for (i, module) in modules_in_order.into_iter().enumerate() {
        let project_dir = &plan.steps[i].project_dir;
        let project_dir_norm = project_dir.canonicalize().unwrap_or_else(|_| project_dir.clone());

        // Relativize file URIs for this project's symbols
        let mut rel_symbols = Vec::with_capacity(module.symbols.len());
        for mut s in module.symbols {
            // Try to relativize decl span's file_uri
            let original = std::path::PathBuf::from(&s.decl_span.file_uri);
            let rel = if original.is_absolute() {
                pathdiff::diff_paths(&original, &project_dir_norm).unwrap_or(original.clone())
            } else {
                original.clone()
            };
            // Normalize to forward slashes so URIs are platform-independent.
            let rel_str = rel.to_string_lossy().replace('\\', "/");
            s.decl_span.file_uri = rel_str;
            rel_symbols.push(s);
        }

        all_project_symbols.push(crate::common::symbols::ProjectSymbols {
            project: module.project_key.name.clone(),
            project_dir: project_dir.to_string_lossy().to_string(),
            symbols: rel_symbols,
        });
    }

    Ok(BuildResult {
        image: program_image,
        file_manager,
        symbols: all_project_symbols,
    })
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::deps::resolver::{ProjectKey, ResolvedGraph, ResolvedNode};
    use crate::manifest::{Manifest, ManifestKind};
    use crate::sources::discover;
    use prometeu_analysis::ids::ProjectId;
    use std::collections::BTreeMap;
    use crate::frontends::pbs::adapter::PbsFrontendAdapter;
    use std::fs;
    use std::path::PathBuf;
    use tempfile::tempdir;

    // Writes a minimal valid `prometeu.json` into `dir` so manifest loading succeeds.
    fn make_minimal_manifest(dir: &std::path::Path) {
        // Minimal Prometeu JSON manifest for an App project
        // See crates/prometeu-compiler/src/manifest.rs::load_manifest (expects prometeu.json)
        let manifest_json = r#"{
"name": "root",
"version": "0.1.0",
"kind": "app",
"dependencies": {}
}"#;
        fs::write(dir.join("prometeu.json"), manifest_json).unwrap();
    }

    // Builds a one-node ResolvedGraph rooted at `project_dir`, discovering
    // whatever sources exist there (tolerating discovery failure with empty sources).
    fn build_single_node_graph(project_dir: PathBuf) -> ResolvedGraph {
        // Discover sources as the normal pipeline would
        let sources = discover(&project_dir).unwrap_or_else(|_| crate::sources::ProjectSources {
            main: None,
            files: vec![],
            test_files: vec![],
        });

        let id = ProjectId(0);
        let node = ResolvedNode {
            id,
            key: ProjectKey { name: "root".to_string(), version: "0.1.0".to_string() },
            path: project_dir,
            manifest: Manifest { name: "root".into(), version: "0.1.0".into(), kind: ManifestKind::App, dependencies: BTreeMap::new() },
            sources,
        };

        let mut g = ResolvedGraph::default();
        g.root_id = Some(id);
        g.nodes.insert(id, node);
        g
    }

    /// A root project without src/main/modules/main.pbs must fail the build
    /// with E_MISSING_ENTRY_POINT_FILE (or at minimum some compile error).
    #[test]
    fn test_missing_main_pbs_errors() {
        let dir = tempdir().unwrap();
        let project_dir = dir.path().to_path_buf();
        fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
        make_minimal_manifest(&project_dir);

        let graph = build_single_node_graph(project_dir);
        let fe = PbsFrontendAdapter;
        let res = build_from_graph(&graph, BuildTarget::Main, &fe);
        assert!(res.is_err());
        let err = res.err().unwrap();
        match err {
            BuildError::Compile(CompileError::Frontend(bundle)) => {
                assert!(bundle.diagnostics.iter().any(|d| d.code == "E_MISSING_ENTRY_POINT_FILE"));
            }
            BuildError::Compile(other) => {
                // Accept any compile error here; presence check should have fired earlier.
                // This keeps the test resilient across internal pipeline changes.
                eprintln!("Got non-frontend compile error: {}", other);
            }
            _ => panic!("expected compile error for missing entry point file"),
        }
    }

    /// A main.pbs whose `frame` takes a parameter is not a valid entry point;
    /// the build must report E_MISSING_ENTRY_POINT_FN.
    #[test]
    fn test_wrong_signature_frame_errors() {
        let dir = tempdir().unwrap();
        let project_dir = dir.path().to_path_buf();
        fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
        make_minimal_manifest(&project_dir);

        // Create main.pbs but with wrong signature for frame (has a parameter)
        let code = r#"
fn frame(a: int): void {
return;
}
"#;
        fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();

        let graph = build_single_node_graph(project_dir);
        let fe = PbsFrontendAdapter;
        let res = build_from_graph(&graph, BuildTarget::Main, &fe);
        assert!(res.is_err());
        let err = res.err().unwrap();
        match err {
            BuildError::Compile(CompileError::Frontend(bundle)) => {
                assert!(bundle.diagnostics.iter().any(|d| d.code == "E_MISSING_ENTRY_POINT_FN"));
            }
            _ => panic!("expected frontend error for wrong frame signature"),
        }
    }

    /// End-to-end check that the compiler injects FRAME_SYNC immediately
    /// before the final RET of a valid `frame` entry point.
    #[test]
    fn test_framesync_injected_end_to_end() {
        use prometeu_bytecode::opcode::OpCode;
        let dir = tempdir().unwrap();
        let project_dir = dir.path().to_path_buf();
        fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
        make_minimal_manifest(&project_dir);

        // Valid entry point
        let code = r#"
fn frame(): void {
let x = 1 + 1;
return;
}
"#;
        fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();

        let graph = build_single_node_graph(project_dir);
        let fe = PbsFrontendAdapter;
        let res = build_from_graph(&graph, BuildTarget::Main, &fe).expect("should compile");

        // Locate function by name -> function index
        let di = res.image.debug_info.as_ref().expect("debug info");
        let (func_idx, _) = di
            .function_names
            .iter()
            .find(|(_, name)| name == "frame")
            .cloned()
            .expect("frame function should exist");

        // Slice the frame function's body out of the linked ROM.
        let meta = &res.image.functions[func_idx as usize];
        let start = meta.code_offset as usize;
        let end = (meta.code_offset + meta.code_len) as usize;
        let code = &res.image.rom[start..end];

        // Decode sequentially using the canonical decoder; record opcode stream.
        let mut pcs = Vec::new();
        let mut i = 0usize;
        while i < code.len() {
            let instr = prometeu_bytecode::decoder::decode_next(i, code).expect("decoder should succeed");
            pcs.push(instr.opcode as u16);
            i = instr.next_pc;
        }
        assert_eq!(i, code.len(), "decoder must end exactly at function end");

        assert!(pcs.len() >= 2);
        let last = *pcs.last().unwrap();
        let prev = pcs[pcs.len() - 2];
        assert_eq!(last, OpCode::Ret as u16, "last opcode must be RET");
        assert_eq!(prev, OpCode::FrameSync as u16, "prev opcode must be FRAME_SYNC");
    }
}
|
|
||||||
@ -1,498 +0,0 @@
|
|||||||
use crate::backend::emit_fragments;
|
|
||||||
use crate::building::plan::{BuildStep, BuildTarget};
|
|
||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::common::files::FileManager;
|
|
||||||
use crate::deps::resolver::ProjectKey;
|
|
||||||
use language_api::traits::Frontend as CanonFrontend;
|
|
||||||
use language_api::types::{ExportItem, TypeRef};
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use prometeu_bytecode::{ConstantPoolEntry, DebugInfo, FunctionMeta};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::{BTreeMap, HashMap};
|
|
||||||
|
|
||||||
// Simple stable 32-bit FNV-1a hash for synthesizing opaque TypeRef tokens from names.
// Standard FNV-1a: start from the offset basis, XOR each byte, multiply by the prime.
fn symbol_name_hash(name: &str) -> u32 {
    const FNV_OFFSET_BASIS: u32 = 0x811C9DC5;
    const FNV_PRIME: u32 = 0x01000193;
    name.as_bytes()
        .iter()
        .fold(FNV_OFFSET_BASIS, |acc, &byte| {
            (acc ^ u32::from(byte)).wrapping_mul(FNV_PRIME)
        })
}
|
|
||||||
|
|
||||||
/// Canonical identity of an exported item: the module path that declares it
/// plus the item itself. Ordered so it can key a `BTreeMap`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ExportKey {
    /// Logical module path within the exporting project (e.g. "math").
    pub module_path: String,
    /// The exported item (function / type / service) from the language API.
    pub item: ExportItem,
}
|
|
||||||
|
|
||||||
/// Linker-facing metadata attached to each export.
#[derive(Debug, Clone)]
pub struct ExportMetadata {
    /// Module-local function index, when the export is a function with code.
    pub func_idx: Option<u32>,
    /// True when the symbol is provided by the host rather than bytecode.
    pub is_host: bool,
    /// Optional type token for non-function exports.
    pub ty: Option<TypeRef>,
}
|
|
||||||
|
|
||||||
/// Identity of an imported symbol: which dependency (by alias), which module
/// inside it, and which symbol name. Serializable and ordered for map keys.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct ImportKey {
    /// Dependency alias as written in the importing project's manifest.
    pub dep_alias: String,
    /// Module path inside the dependency (e.g. "math").
    pub module_path: String,
    /// Name of the imported symbol.
    pub symbol_name: String,
}
|
|
||||||
|
|
||||||
/// One import plus every code location that must be patched to its resolved target.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportMetadata {
    /// Which dependency symbol this import refers to.
    pub key: ImportKey,
    /// Module-local PCs of instructions whose immediate the linker must relocate.
    pub relocation_pcs: Vec<u32>,
}
|
|
||||||
|
|
||||||
/// Output of compiling one project for one build target — the unit the linker consumes.
#[derive(Debug, Clone)]
pub struct CompiledModule {
    /// Stable id of the project within the resolved dependency graph.
    pub project_id: ProjectId,
    /// Name + version of the project.
    pub project_key: ProjectKey,
    /// Which target (e.g. Main) this module was compiled for.
    pub target: BuildTarget,
    /// Exported items keyed canonically; ordered map for deterministic linking.
    pub exports: BTreeMap<ExportKey, ExportMetadata>,
    /// Imports from dependencies, with their relocation sites.
    pub imports: Vec<ImportMetadata>,
    /// Module-local constant pool (merged/deduplicated by the linker).
    pub const_pool: Vec<ConstantPoolEntry>,
    /// Raw bytecode for all functions in this module.
    pub code: Vec<u8>,
    /// Per-function metadata (offsets/lengths into `code`, slot counts, ...).
    pub function_metas: Vec<FunctionMeta>,
    /// Optional debug information (function names, etc.).
    pub debug_info: Option<DebugInfo>,
    /// Source-level symbols collected during compilation.
    pub symbols: Vec<crate::common::symbols::Symbol>,
}
|
|
||||||
|
|
||||||
/// Failure modes of compiling a single project.
#[derive(Debug)]
pub enum CompileError {
    /// Frontend reported diagnostics (parse/analysis errors).
    Frontend(DiagnosticBundle),
    /// The same symbol is exported by two sources visible to this project.
    DuplicateExport {
        /// Display form of the conflicting symbol.
        symbol: String,
        /// Where the symbol was first seen.
        first_dep: String,
        /// Where it was seen again.
        second_dep: String,
    },
    /// Underlying filesystem/IO failure.
    Io(std::io::Error),
    /// Invariant violation inside the compile pipeline (e.g. bad FE payload).
    Internal(String),
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for CompileError {
    // Human-readable rendering; DuplicateExport uses a multi-line message
    // (the exact text is relied on by error reporting — do not reword casually).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CompileError::Frontend(d) => write!(f, "Frontend error: {:?}", d),
            CompileError::DuplicateExport {
                symbol,
                first_dep,
                second_dep,
            } => write!(
                f,
                "duplicate export: symbol `{}`\n first defined in dependency `{}`\n again defined in dependency `{}`",
                symbol, first_dep, second_dep
            ),
            CompileError::Io(e) => write!(f, "IO error: {}", e),
            CompileError::Internal(s) => write!(f, "Internal error: {}", s),
        }
    }
}
|
|
||||||
|
|
||||||
// Marker impl: lets CompileError be used wherever a `dyn std::error::Error` is expected.
impl std::error::Error for CompileError {}
|
|
||||||
|
|
||||||
impl From<std::io::Error> for CompileError {
|
|
||||||
fn from(e: std::io::Error) -> Self {
|
|
||||||
CompileError::Io(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<crate::common::diagnostics::DiagnosticBundle> for CompileError {
    /// Enables `?` on frontend analysis results that yield diagnostics.
    fn from(bundle: crate::common::diagnostics::DiagnosticBundle) -> Self {
        Self::Frontend(bundle)
    }
}
|
|
||||||
|
|
||||||
// Note: PBS ModuleProvider/ModuleSymbols are no longer used at the backend boundary.
|
|
||||||
|
|
||||||
pub fn compile_project(
|
|
||||||
step: BuildStep,
|
|
||||||
dep_modules: &HashMap<ProjectId, CompiledModule>,
|
|
||||||
fe: &dyn CanonFrontend,
|
|
||||||
_file_manager: &mut FileManager,
|
|
||||||
) -> Result<CompiledModule, CompileError> {
|
|
||||||
// 1) FE-driven analysis per source → gather VM IR modules
|
|
||||||
let mut combined_vm = crate::ir_lang::Module::new(step.project_key.name.clone());
|
|
||||||
combined_vm.const_pool = crate::ir_core::ConstPool::new();
|
|
||||||
|
|
||||||
// Origin module_path per appended function
|
|
||||||
let mut combined_func_origins: Vec<String> = Vec::new();
|
|
||||||
|
|
||||||
let insert_const =
|
|
||||||
|pool: &mut crate::ir_core::ConstPool, val: &crate::ir_core::ConstantValue| -> crate::ir_lang::types::ConstId {
|
|
||||||
let new_id = pool.insert(val.clone());
|
|
||||||
crate::ir_lang::types::ConstId(new_id.0)
|
|
||||||
};
|
|
||||||
|
|
||||||
// Map: module_path → FE exports for that module
|
|
||||||
let mut fe_exports_per_module: HashMap<String, Vec<language_api::types::ExportItem>> = HashMap::new();
|
|
||||||
|
|
||||||
// Build dependency synthetic export keys and detect cross-dependency duplicates upfront
|
|
||||||
use std::collections::HashSet;
|
|
||||||
#[derive(Hash, Eq, PartialEq)]
|
|
||||||
struct DepKey(String, ExportItem); // (module_path, item)
|
|
||||||
|
|
||||||
fn display_export_item(item: &ExportItem) -> String {
|
|
||||||
match item {
|
|
||||||
ExportItem::Type { name } | ExportItem::Service { name } => name.as_str().to_string(),
|
|
||||||
ExportItem::Function { fn_key } => {
|
|
||||||
let base = fn_key.debug_name();
|
|
||||||
format!("{}#sig{}", base, fn_key.sig.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let mut dep_seen: HashSet<DepKey> = HashSet::new();
|
|
||||||
for (alias, project_id) in &step.deps {
|
|
||||||
if let Some(compiled) = dep_modules.get(project_id) {
|
|
||||||
for (key, _meta) in &compiled.exports {
|
|
||||||
// Track using canonical item keyed by module path; alias variations only for display/conflict reporting.
|
|
||||||
let synthetic_paths = [
|
|
||||||
format!("{}/{}", alias, key.module_path),
|
|
||||||
format!("@{}:{}", alias, key.module_path),
|
|
||||||
];
|
|
||||||
for sp in synthetic_paths {
|
|
||||||
let k = DepKey(sp.clone(), key.item.clone());
|
|
||||||
if !dep_seen.insert(k) {
|
|
||||||
return Err(CompileError::DuplicateExport {
|
|
||||||
symbol: display_export_item(&key.item),
|
|
||||||
first_dep: alias.clone(),
|
|
||||||
second_dep: alias.clone(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for source_rel in &step.sources {
|
|
||||||
let source_abs = step.project_dir.join(source_rel);
|
|
||||||
let full_path = source_rel.to_string_lossy().replace('\\', "/");
|
|
||||||
let logical_module_path = if let Some(stripped) = full_path.strip_prefix("src/main/modules/") {
|
|
||||||
stripped
|
|
||||||
} else if let Some(stripped) = full_path.strip_prefix("src/test/modules/") {
|
|
||||||
stripped
|
|
||||||
} else {
|
|
||||||
&full_path
|
|
||||||
};
|
|
||||||
let module_path = std::path::Path::new(logical_module_path)
|
|
||||||
.parent()
|
|
||||||
.map(|p| p.to_string_lossy().replace('\\', "/"))
|
|
||||||
.unwrap_or_else(|| "".to_string());
|
|
||||||
|
|
||||||
let unit = fe.parse_and_analyze(&source_abs.to_string_lossy());
|
|
||||||
// Deserialize VM IR from canonical payload
|
|
||||||
let vm_module: crate::ir_lang::Module = if unit.lowered_ir.format == "vm-ir-json" {
|
|
||||||
match serde_json::from_slice(&unit.lowered_ir.bytes) {
|
|
||||||
Ok(m) => m,
|
|
||||||
Err(e) => return Err(CompileError::Internal(format!("Invalid FE VM-IR payload: {}", e))),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return Err(CompileError::Internal(format!("Unsupported lowered IR format: {}", unit.lowered_ir.format)));
|
|
||||||
};
|
|
||||||
|
|
||||||
// Aggregate FE exports per module, detecting duplicates and dep conflicts
|
|
||||||
let entry = fe_exports_per_module.entry(module_path.clone()).or_insert_with(Vec::new);
|
|
||||||
for it in unit.exports {
|
|
||||||
// Conflict with dependency synthetic exports?
|
|
||||||
let dep_key = DepKey(module_path.clone(), it.clone());
|
|
||||||
if dep_seen.contains(&dep_key) {
|
|
||||||
return Err(CompileError::DuplicateExport {
|
|
||||||
symbol: display_export_item(&it),
|
|
||||||
first_dep: "dependency".to_string(),
|
|
||||||
second_dep: "local".to_string(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Local duplicate within same module?
|
|
||||||
let already = entry.iter().any(|e| e == &it);
|
|
||||||
if already {
|
|
||||||
return Err(CompileError::Frontend(
|
|
||||||
DiagnosticBundle::error(
|
|
||||||
"E_RESOLVE_DUPLICATE_SYMBOL",
|
|
||||||
format!("Duplicate symbol '{:?}' in module '{}'", it, module_path),
|
|
||||||
crate::common::spans::Span::new(crate::common::spans::FileId::INVALID, 0, 0),
|
|
||||||
)
|
|
||||||
));
|
|
||||||
}
|
|
||||||
entry.push(it);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remap this module's const pool into the combined pool
|
|
||||||
let mut const_map: Vec<crate::ir_lang::types::ConstId> = Vec::with_capacity(vm_module.const_pool.constants.len());
|
|
||||||
for c in &vm_module.const_pool.constants {
|
|
||||||
const_map.push(insert_const(&mut combined_vm.const_pool, c));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Append functions; remap PushConst ids safely
|
|
||||||
for mut f in vm_module.functions.into_iter() {
|
|
||||||
for instr in &mut f.body {
|
|
||||||
let kind_clone = instr.kind.clone();
|
|
||||||
if let crate::ir_lang::instr::InstrKind::PushConst(old_id) = kind_clone {
|
|
||||||
let mapped = const_map.get(old_id.0 as usize).cloned().unwrap_or(old_id);
|
|
||||||
instr.kind = crate::ir_lang::instr::InstrKind::PushConst(mapped);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
combined_func_origins.push(module_path.clone());
|
|
||||||
combined_vm.functions.push(f);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let fragments = emit_fragments(&combined_vm)
|
|
||||||
.map_err(|e| CompileError::Internal(format!("Emission error: {}", e)))?;
|
|
||||||
|
|
||||||
// Ensure function metas reflect final slots info
|
|
||||||
let mut fixed_function_metas = fragments.functions.clone();
|
|
||||||
for (i, fm) in fixed_function_metas.iter_mut().enumerate() {
|
|
||||||
if let Some(vm_func) = combined_vm.functions.get(i) {
|
|
||||||
fm.param_slots = vm_func.param_slots;
|
|
||||||
fm.local_slots = vm_func.local_slots;
|
|
||||||
fm.return_slots = vm_func.return_slots;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2) Collect exports from FE contract, map to VM function indices
|
|
||||||
let mut exports = BTreeMap::new();
|
|
||||||
|
|
||||||
for (module_path, items) in &fe_exports_per_module {
|
|
||||||
for item in items {
|
|
||||||
match item {
|
|
||||||
ExportItem::Type { name } => {
|
|
||||||
exports.insert(
|
|
||||||
ExportKey { module_path: module_path.clone(), item: item.clone() },
|
|
||||||
ExportMetadata { func_idx: None, is_host: false, ty: Some(TypeRef(symbol_name_hash(name.as_str()))) },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
ExportItem::Service { name: _ } => {
|
|
||||||
// Service owner export (no functions synthesized here)
|
|
||||||
exports.insert(
|
|
||||||
ExportKey { module_path: module_path.clone(), item: item.clone() },
|
|
||||||
ExportMetadata { func_idx: None, is_host: false, ty: None },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
ExportItem::Function { fn_key } => {
|
|
||||||
// Map function to VM function index by name and overwrite FE-provided sig with actual VM sig id
|
|
||||||
for (i, f) in combined_vm.functions.iter().enumerate() {
|
|
||||||
if combined_func_origins.get(i).map(|s| s.as_str()) != Some(module_path.as_str()) { continue; }
|
|
||||||
if f.name != fn_key.name.as_str() { continue; }
|
|
||||||
// Rebuild canonical key with authoritative BE signature id
|
|
||||||
let fixed_key = ExportItem::Function { fn_key: language_api::types::CanonicalFnKey::new(
|
|
||||||
fn_key.owner.clone(),
|
|
||||||
fn_key.name.clone(),
|
|
||||||
language_api::types::SignatureRef(f.sig.0 as u32),
|
|
||||||
)};
|
|
||||||
exports.insert(
|
|
||||||
ExportKey { module_path: module_path.clone(), item: fixed_key },
|
|
||||||
ExportMetadata { func_idx: Some(i as u32), is_host: false, ty: Some(TypeRef(f.sig.0 as u32)) },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3) Collect symbols for analysis (LSP, etc.) — minimal fallback from debug_info
|
|
||||||
let mut project_symbols = Vec::new();
|
|
||||||
if let Some(di) = &fragments.debug_info {
|
|
||||||
// Create at least a symbol for entry point or first function
|
|
||||||
if let Some((_, name)) = di.function_names.first() {
|
|
||||||
let name = name.split('@').next().unwrap_or(name.as_str()).to_string();
|
|
||||||
let span = crate::common::symbols::SpanRange {
|
|
||||||
file_uri: step.project_dir.join("src/main/modules/main.pbs").to_string_lossy().to_string(),
|
|
||||||
start: crate::common::symbols::Pos { line: 0, col: 0 },
|
|
||||||
end: crate::common::symbols::Pos { line: 0, col: 1 },
|
|
||||||
};
|
|
||||||
project_symbols.push(crate::common::symbols::Symbol {
|
|
||||||
id: format!("{}:{}:{}:{}:{:016x}", step.project_key.name, "function", "", name.clone(), 0),
|
|
||||||
name,
|
|
||||||
kind: "function".to_string(),
|
|
||||||
exported: false,
|
|
||||||
module_path: "".to_string(),
|
|
||||||
decl_span: span,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 4) Enrich debug_info (only if present). Avoid requiring Default on DebugInfo.
|
|
||||||
let mut debug_info = fragments.debug_info.clone();
|
|
||||||
if let Some(dbg) = debug_info.as_mut() {
|
|
||||||
// annotate function names with "@offset+len"
|
|
||||||
// NOTE: assumes dbg.function_names aligns with functions order.
|
|
||||||
let mut enriched = Vec::new();
|
|
||||||
for (i, (fid, name)) in dbg.function_names.clone().into_iter().enumerate() {
|
|
||||||
if let Some(meta) = fixed_function_metas.get(i) {
|
|
||||||
enriched.push((fid, format!("{}@{}+{}", name, meta.code_offset, meta.code_len)));
|
|
||||||
} else {
|
|
||||||
enriched.push((fid, name));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !enriched.is_empty() {
|
|
||||||
dbg.function_names = enriched;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 5) Collect imports from unresolved labels
|
|
||||||
let mut imports = Vec::new();
|
|
||||||
for (label, pcs) in fragments.unresolved_labels {
|
|
||||||
if !label.starts_with('@') {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format: @dep_alias::module_path:symbol_name
|
|
||||||
let parts: Vec<&str> = label[1..].splitn(2, "::").collect();
|
|
||||||
if parts.len() != 2 {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let dep_alias = parts[0].to_string();
|
|
||||||
let rest = parts[1];
|
|
||||||
|
|
||||||
// Split from the right once: "...:<symbol_name>"
|
|
||||||
let sub_parts: Vec<&str> = rest.rsplitn(2, ':').collect();
|
|
||||||
if sub_parts.len() != 2 {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let symbol_name = sub_parts[0].to_string();
|
|
||||||
let module_path = sub_parts[1].to_string();
|
|
||||||
|
|
||||||
imports.push(ImportMetadata {
|
|
||||||
key: ImportKey {
|
|
||||||
dep_alias,
|
|
||||||
module_path,
|
|
||||||
symbol_name,
|
|
||||||
},
|
|
||||||
relocation_pcs: pcs,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(CompiledModule {
|
|
||||||
project_id: step.project_id,
|
|
||||||
project_key: step.project_key,
|
|
||||||
target: step.target,
|
|
||||||
exports,
|
|
||||||
imports,
|
|
||||||
const_pool: fragments.const_pool,
|
|
||||||
code: fragments.code,
|
|
||||||
function_metas: fixed_function_metas,
|
|
||||||
debug_info,
|
|
||||||
symbols: project_symbols,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::frontends::pbs::adapter::PbsFrontendAdapter;
    use language_api::types::{ExportItem, ItemName};
    use std::fs;
    use std::path::PathBuf;
    use tempfile::tempdir;

    /// Compiles a minimal single-module project (one type, two functions)
    /// and checks that the exported type `Vec2` lands in the canonical
    /// export map under the empty module path.
    #[test]
    fn test_compile_root_only_project() {
        let dir = tempdir().unwrap();
        let project_dir = dir.path().to_path_buf();

        fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();

        // NOTE: syntax adjustment: the current PBS entry point is `fn frame(): void`
        // inside main.pbs, and "mod fn frame" may not be valid. Kept the essentials.
        let main_code = r#"
            pub declare struct Vec2(x: int, y: int)

            fn add(a: int, b: int): int {
                return a + b;
            }

            fn frame(): void {
                let x = add(1, 2);
            }
        "#;

        fs::write(project_dir.join("src/main/modules/main.pbs"), main_code).unwrap();

        let project_key = ProjectKey {
            name: "root".to_string(),
            version: "0.1.0".to_string(),
        };
        let project_id = ProjectId(0);

        // Root-only step: no dependencies, a single main-target source file.
        let step = BuildStep {
            project_id,
            project_key: project_key.clone(),
            project_dir: project_dir.clone(),
            target: BuildTarget::Main,
            sources: vec![PathBuf::from("src/main/modules/main.pbs")],
            deps: BTreeMap::new(),
        };

        let mut file_manager = FileManager::new();
        let fe = PbsFrontendAdapter;
        let compiled =
            compile_project(step, &HashMap::new(), &fe, &mut file_manager).expect("Failed to compile project");

        assert_eq!(compiled.project_id, project_id);
        assert_eq!(compiled.target, BuildTarget::Main);

        // Vec2 should be exported (canonical)
        let vec2_key = ExportKey {
            module_path: "".to_string(),
            item: ExportItem::Type { name: ItemName::new("Vec2").unwrap() },
        };
        assert!(compiled.exports.contains_key(&vec2_key));
    }

    /// A method declared inside a `pub service` should surface as a
    /// function export whose canonical fn key carries the method name.
    #[test]
    fn test_service_method_export_qualified() {
        let dir = tempdir().unwrap();
        let project_dir = dir.path().to_path_buf();

        fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();

        let main_code = r#"
            pub service Log {
                fn debug(msg: string): void {
                }
            }
        "#;

        fs::write(project_dir.join("src/main/modules/main.pbs"), main_code).unwrap();

        let project_key = ProjectKey {
            name: "root".to_string(),
            version: "0.1.0".to_string(),
        };
        let project_id = ProjectId(0);

        let step = BuildStep {
            project_id,
            project_key: project_key.clone(),
            project_dir: project_dir.clone(),
            target: BuildTarget::Main,
            sources: vec![PathBuf::from("src/main/modules/main.pbs")],
            deps: BTreeMap::new(),
        };

        let mut file_manager = FileManager::new();
        let fe = PbsFrontendAdapter;
        let compiled = compile_project(step, &HashMap::new(), &fe, &mut file_manager)
            .expect("Failed to compile project");

        // Find a function export with canonical fn key for method `debug`.
        // Owner is optional at this stage; canonical owner propagation will be added later.
        let mut found = false;
        for (key, _meta) in &compiled.exports {
            if let ExportItem::Function { fn_key } = &key.item {
                if fn_key.name.as_str() == "debug" {
                    found = true;
                    break;
                }
            }
        }

        assert!(found, "Expected an export with canonical fn key name=debug but not found. Exports: {:?}", compiled.exports.keys().collect::<Vec<_>>());
    }
}
|
|
||||||
@ -1,249 +0,0 @@
|
|||||||
use crate::deps::resolver::{ProjectKey, ResolvedGraph};
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::{BTreeMap, HashMap};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
/// Which source set of a project is being built.
/// Serialized in lowercase ("main" / "test") for canonical JSON.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BuildTarget {
    /// Regular build: compiles the project's main source files.
    Main,
    /// Test build: compiles the project's test source files.
    Test,
}
|
|
||||||
|
|
||||||
/// One unit of work in a build plan: a single project compiled for a
/// specific target, scheduled after all of its dependencies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildStep {
    /// Stable id of the project inside the resolved graph.
    pub project_id: ProjectId,
    /// (name, version) key identifying the project.
    pub project_key: ProjectKey,
    /// Root directory of the project on disk.
    pub project_dir: PathBuf,
    /// Which source set (main/test) this step builds.
    pub target: BuildTarget,
    /// Project-relative source files, sorted lexicographically for determinism.
    pub sources: Vec<PathBuf>,
    /// Dependency alias -> project id; BTreeMap keeps iteration order stable.
    pub deps: BTreeMap<String, ProjectId>,
}
|
|
||||||
|
|
||||||
/// An ordered list of build steps; a project's dependencies always
/// appear before the project itself (topological order).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildPlan {
    pub steps: Vec<BuildStep>,
}
|
|
||||||
|
|
||||||
impl BuildPlan {
|
|
||||||
pub fn from_graph(graph: &ResolvedGraph, target: BuildTarget) -> Self {
|
|
||||||
let mut steps = Vec::new();
|
|
||||||
let sorted_ids = topological_sort(graph);
|
|
||||||
|
|
||||||
for id in sorted_ids {
|
|
||||||
if let Some(node) = graph.nodes.get(&id) {
|
|
||||||
let sources_list: Vec<PathBuf> = match target {
|
|
||||||
BuildTarget::Main => node.sources.files.clone(),
|
|
||||||
BuildTarget::Test => node.sources.test_files.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Normalize to relative paths and sort lexicographically
|
|
||||||
let mut sources: Vec<PathBuf> = sources_list
|
|
||||||
.into_iter()
|
|
||||||
.map(|p| {
|
|
||||||
p.strip_prefix(&node.path)
|
|
||||||
.map(|rp| rp.to_path_buf())
|
|
||||||
.unwrap_or(p)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
sources.sort();
|
|
||||||
|
|
||||||
let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
|
|
||||||
if let Some(edges) = graph.edges.get(&id) {
|
|
||||||
for edge in edges {
|
|
||||||
deps.insert(edge.alias.clone(), edge.to);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
steps.push(BuildStep {
|
|
||||||
project_id: id,
|
|
||||||
project_key: node.key.clone(),
|
|
||||||
project_dir: node.path.clone(),
|
|
||||||
target,
|
|
||||||
sources,
|
|
||||||
deps,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Self { steps }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn topological_sort(graph: &ResolvedGraph) -> Vec<ProjectId> {
|
|
||||||
let mut in_degree = HashMap::new();
|
|
||||||
let mut adj = HashMap::new();
|
|
||||||
|
|
||||||
for id in graph.nodes.keys() {
|
|
||||||
in_degree.insert(id.clone(), 0);
|
|
||||||
adj.insert(id.clone(), Vec::new());
|
|
||||||
}
|
|
||||||
|
|
||||||
for (from, edges) in &graph.edges {
|
|
||||||
for edge in edges {
|
|
||||||
// from depends on edge.to
|
|
||||||
// so edge.to must be built BEFORE from
|
|
||||||
// edge.to -> from
|
|
||||||
adj.get_mut(&edge.to).unwrap().push(from.clone());
|
|
||||||
*in_degree.get_mut(from).unwrap() += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut ready: std::collections::BinaryHeap<ReverseProjectId> = graph.nodes.keys()
|
|
||||||
.filter(|id| *in_degree.get(id).unwrap() == 0)
|
|
||||||
.map(|id| ReverseProjectId(*id))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let mut result = Vec::new();
|
|
||||||
while let Some(ReverseProjectId(u)) = ready.pop() {
|
|
||||||
result.push(u);
|
|
||||||
|
|
||||||
if let Some(neighbors) = adj.get(&u) {
|
|
||||||
for v in neighbors {
|
|
||||||
let degree = in_degree.get_mut(v).unwrap();
|
|
||||||
*degree -= 1;
|
|
||||||
if *degree == 0 {
|
|
||||||
ready.push(ReverseProjectId(*v));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Wrapper that inverts `ProjectId` ordering so that `BinaryHeap` (a
/// max-heap) behaves as a min-heap over the numeric project id.
#[derive(Eq, PartialEq, Copy, Clone)]
struct ReverseProjectId(ProjectId);

impl Ord for ReverseProjectId {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // BinaryHeap is a max-heap. We want min-heap with stable numeric order.
        // So we reverse the comparison on the numeric id.
        other.0.as_u32().cmp(&self.0.as_u32())
    }
}

impl PartialOrd for ReverseProjectId {
    // Delegate to the total order so PartialOrd and Ord can never disagree.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::deps::resolver::{ProjectKey, ResolvedEdge, ResolvedGraph, ResolvedNode};
    use crate::manifest::Manifest;
    use crate::sources::ProjectSources;
    use std::collections::BTreeMap;

    /// Builds a lib-kind node with deliberately unsorted source lists so
    /// the plan's sorting behavior can be observed.
    fn mock_node(id: ProjectId, name: &str, version: &str) -> (ProjectId, ResolvedNode) {
        let node = ResolvedNode {
            id,
            key: ProjectKey { name: name.to_string(), version: version.to_string() },
            path: PathBuf::from(format!("/{}", name)),
            manifest: Manifest {
                name: name.to_string(),
                version: version.to_string(),
                kind: crate::manifest::ManifestKind::Lib,
                dependencies: BTreeMap::new(),
            },
            sources: ProjectSources {
                main: None,
                // Intentionally out of lexical order to exercise sorting.
                files: vec![PathBuf::from("b.pbs"), PathBuf::from("a.pbs")],
                test_files: vec![PathBuf::from("test_b.pbs"), PathBuf::from("test_a.pbs")],
            },
        };
        (id, node)
    }

    /// With no edges, steps must come out ordered by numeric project id.
    #[test]
    fn test_topo_sort_stability() {
        let mut graph = ResolvedGraph::default();

        let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
        let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
        let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");

        graph.nodes.insert(a_id, a);
        graph.nodes.insert(b_id, b);
        graph.nodes.insert(c_id, c);

        // No edges, order by numeric id: a(0), b(1), c(2)
        let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
        assert_eq!(plan.steps[0].project_key.name, "a");
        assert_eq!(plan.steps[1].project_key.name, "b");
        assert_eq!(plan.steps[2].project_key.name, "c");
    }

    /// Linear chain c -> b -> a must be built bottom-up: a, b, c; the
    /// dependent step must also carry its alias->id mapping.
    #[test]
    fn test_topo_sort_dependencies() {
        let mut graph = ResolvedGraph::default();

        let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
        let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
        let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");

        graph.nodes.insert(a_id, a.clone());
        graph.nodes.insert(b_id, b.clone());
        graph.nodes.insert(c_id, c.clone());

        // c depends on b, b depends on a
        // Sort should be: a, b, c
        graph.edges.insert(c_id, vec![ResolvedEdge { alias: "b_alias".to_string(), to: b_id }]);
        graph.edges.insert(b_id, vec![ResolvedEdge { alias: "a_alias".to_string(), to: a_id }]);

        let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
        assert_eq!(plan.steps.len(), 3);
        assert_eq!(plan.steps[0].project_key.name, "a");
        assert_eq!(plan.steps[1].project_key.name, "b");
        assert_eq!(plan.steps[2].project_key.name, "c");

        assert_eq!(plan.steps[2].deps.get("b_alias").copied(), Some(b_id));
    }

    /// Diamond graph: d -> {b, c}, b -> a, c -> a. Ties (b vs c) are
    /// broken by numeric id, so the order is fully deterministic.
    #[test]
    fn test_topo_sort_complex() {
        let mut graph = ResolvedGraph::default();

        // d -> b, c
        // b -> a
        // c -> a
        // a

        let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
        let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
        let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");
        let (d_id, d) = mock_node(ProjectId(3), "d", "1.0.0");

        graph.nodes.insert(a_id, a.clone());
        graph.nodes.insert(b_id, b.clone());
        graph.nodes.insert(c_id, c.clone());
        graph.nodes.insert(d_id, d.clone());

        graph.edges.insert(d_id, vec![
            ResolvedEdge { alias: "b".to_string(), to: b_id },
            ResolvedEdge { alias: "c".to_string(), to: c_id },
        ]);
        graph.edges.insert(b_id, vec![ResolvedEdge { alias: "a".to_string(), to: a_id }]);
        graph.edges.insert(c_id, vec![ResolvedEdge { alias: "a".to_string(), to: a_id }]);

        let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
        let names: Vec<_> = plan.steps.iter().map(|s| s.project_key.name.as_str()).collect();
        assert_eq!(names, vec!["a", "b", "c", "d"]);
    }

    /// Both main and test source lists must be sorted lexicographically
    /// in the resulting step, regardless of their input order.
    #[test]
    fn test_sources_sorting() {
        let mut graph = ResolvedGraph::default();
        let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
        graph.nodes.insert(a_id, a);

        let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
        assert_eq!(plan.steps[0].sources, vec![PathBuf::from("a.pbs"), PathBuf::from("b.pbs")]);

        let plan_test = BuildPlan::from_graph(&graph, BuildTarget::Test);
        assert_eq!(plan_test.steps[0].sources, vec![PathBuf::from("test_a.pbs"), PathBuf::from("test_b.pbs")]);
    }
}
|
|
||||||
@ -1,55 +0,0 @@
|
|||||||
use crate::manifest::Manifest;
|
|
||||||
use anyhow::Result;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
/// Per-project configuration loaded from `prometeu.json`.
/// The manifest fields (name, version, ...) are flattened into the same
/// top-level JSON object as the config-specific fields.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ProjectConfig {
    /// Core manifest data, inlined into the top-level JSON object.
    #[serde(flatten)]
    pub manifest: Manifest,
    /// Identifier of the script frontend to use (e.g. "pbs").
    pub script_fe: String,
    /// Entry-point source file, relative to the project directory.
    pub entry: PathBuf,
}
|
|
||||||
|
|
||||||
impl ProjectConfig {
|
|
||||||
pub fn load(project_dir: &Path) -> Result<Self> {
|
|
||||||
let config_path = project_dir.join("prometeu.json");
|
|
||||||
let content = std::fs::read_to_string(&config_path)?;
|
|
||||||
let config: ProjectConfig = serde_json::from_str(&content)
|
|
||||||
.map_err(|e| anyhow::anyhow!("JSON error in {:?}: {}", config_path, e))?;
|
|
||||||
|
|
||||||
// Use manifest validation
|
|
||||||
crate::manifest::load_manifest(project_dir)
|
|
||||||
.map_err(|e| anyhow::anyhow!("{}", e))?;
|
|
||||||
|
|
||||||
Ok(config)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    /// Writes a minimal valid `prometeu.json` and checks that both the
    /// flattened manifest fields and the config-specific fields
    /// (`script_fe`, `entry`) round-trip through `ProjectConfig::load`.
    #[test]
    fn test_load_valid_config() {
        let dir = tempdir().unwrap();
        let config_path = dir.path().join("prometeu.json");
        fs::write(
            config_path,
            r#"{
            "name": "test_project",
            "version": "0.1.0",
            "script_fe": "pbs",
            "entry": "main.pbs"
        }"#,
        )
        .unwrap();

        let config = ProjectConfig::load(dir.path()).unwrap();
        assert_eq!(config.manifest.name, "test_project");
        assert_eq!(config.script_fe, "pbs");
        assert_eq!(config.entry, PathBuf::from("main.pbs"));
    }
}
|
|
||||||
@ -1,155 +0,0 @@
|
|||||||
use crate::common::files::FileManager;
|
|
||||||
use crate::common::spans::{FileId, Span};
|
|
||||||
use serde::{Serialize, Serializer};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum Severity {
|
|
||||||
Error,
|
|
||||||
Warning,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Serialize for Severity {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: Serializer,
|
|
||||||
{
|
|
||||||
match self {
|
|
||||||
Severity::Error => serializer.serialize_str("error"),
|
|
||||||
Severity::Warning => serializer.serialize_str("warning"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A single compiler diagnostic: severity, machine-readable code,
/// human-readable message, the primary span, and related secondary spans.
#[derive(Debug, Clone, Serialize)]
pub struct Diagnostic {
    pub severity: Severity,
    /// Stable diagnostic code, e.g. "E_RESOLVE_DUPLICATE_SYMBOL".
    pub code: String,
    pub message: String,
    /// Primary source location the diagnostic points at.
    pub span: Span,
    /// Additional (message, span) pairs giving extra context.
    pub related: Vec<(String, Span)>,
}
|
|
||||||
|
|
||||||
/// A collection of diagnostics produced by one compilation activity.
#[derive(Debug, Clone, Serialize)]
pub struct DiagnosticBundle {
    pub diagnostics: Vec<Diagnostic>,
}
|
|
||||||
|
|
||||||
impl DiagnosticBundle {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
diagnostics: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn push(&mut self, diagnostic: Diagnostic) {
|
|
||||||
self.diagnostics.push(diagnostic);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn error(code: &str, message: String, span: Span) -> Self {
|
|
||||||
let mut bundle = Self::new();
|
|
||||||
bundle.push(Diagnostic {
|
|
||||||
severity: Severity::Error,
|
|
||||||
code: code.to_string(),
|
|
||||||
message,
|
|
||||||
span,
|
|
||||||
related: Vec::new(),
|
|
||||||
});
|
|
||||||
bundle
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn has_errors(&self) -> bool {
|
|
||||||
self.diagnostics
|
|
||||||
.iter()
|
|
||||||
.any(|d| matches!(d.severity, Severity::Error))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Serializes the diagnostic bundle to canonical JSON, resolving file IDs via FileManager.
|
|
||||||
/// The output is deterministic: diagnostics are sorted by (file_id, start, end, code).
|
|
||||||
pub fn to_json(&self, file_manager: &FileManager) -> String {
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct CanonicalSpan {
|
|
||||||
file: String,
|
|
||||||
start: u32,
|
|
||||||
end: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize)]
|
|
||||||
struct CanonicalDiag {
|
|
||||||
severity: Severity,
|
|
||||||
code: String,
|
|
||||||
message: String,
|
|
||||||
span: CanonicalSpan,
|
|
||||||
related: Vec<(String, CanonicalSpan)>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut diags = self.diagnostics.clone();
|
|
||||||
diags.sort_by(|a, b| {
|
|
||||||
(
|
|
||||||
a.span.file.as_usize(),
|
|
||||||
a.span.start,
|
|
||||||
a.span.end,
|
|
||||||
&a.code,
|
|
||||||
)
|
|
||||||
.cmp(&(b.span.file.as_usize(), b.span.start, b.span.end, &b.code))
|
|
||||||
});
|
|
||||||
|
|
||||||
let canonical_diags: Vec<CanonicalDiag> = diags
|
|
||||||
.iter()
|
|
||||||
.map(|d| {
|
|
||||||
let s = &d.span;
|
|
||||||
let file = if s.file == FileId::INVALID {
|
|
||||||
"<virtual>".to_string()
|
|
||||||
} else {
|
|
||||||
file_manager
|
|
||||||
.get_path(s.file.as_usize())
|
|
||||||
.and_then(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
|
|
||||||
.unwrap_or_else(|| format!("file_{}", s.file.as_usize()))
|
|
||||||
};
|
|
||||||
let canonical_span = CanonicalSpan {
|
|
||||||
file,
|
|
||||||
start: s.start,
|
|
||||||
end: s.end,
|
|
||||||
};
|
|
||||||
|
|
||||||
let related = d
|
|
||||||
.related
|
|
||||||
.iter()
|
|
||||||
.map(|(msg, sp)| {
|
|
||||||
let file = if sp.file == FileId::INVALID {
|
|
||||||
"<virtual>".to_string()
|
|
||||||
} else {
|
|
||||||
file_manager
|
|
||||||
.get_path(sp.file.as_usize())
|
|
||||||
.and_then(|p| p.file_name().map(|n| n.to_string_lossy().to_string()))
|
|
||||||
.unwrap_or_else(|| format!("file_{}", sp.file.as_usize()))
|
|
||||||
};
|
|
||||||
let rsp = CanonicalSpan {
|
|
||||||
file,
|
|
||||||
start: sp.start,
|
|
||||||
end: sp.end,
|
|
||||||
};
|
|
||||||
(msg.clone(), rsp)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
CanonicalDiag {
|
|
||||||
severity: d.severity.clone(),
|
|
||||||
code: d.code.clone(),
|
|
||||||
message: d.message.clone(),
|
|
||||||
span: canonical_span,
|
|
||||||
related,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
serde_json::to_string_pretty(&canonical_diags).unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Diagnostic> for DiagnosticBundle {
    /// Wraps a single diagnostic in a one-element bundle.
    fn from(diagnostic: Diagnostic) -> Self {
        Self {
            diagnostics: vec![diagnostic],
        }
    }
}
|
|
||||||
@ -1,106 +0,0 @@
|
|||||||
use prometeu_analysis::interner::NameInterner;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
/// An in-memory source file registered with the `FileManager`.
#[derive(Debug, Clone)]
pub struct SourceFile {
    /// Index of this file inside the manager; doubles as its file id.
    pub id: usize,
    pub path: PathBuf,
    /// Shared, immutable source text; `Arc` keeps clones cheap.
    pub source: Arc<str>,
}
|
|
||||||
|
|
||||||
/// Owns all loaded source files plus the name interner used across a
/// compilation session. File ids are indices into `files`.
#[derive(Debug, Clone)]
pub struct FileManager {
    files: Vec<SourceFile>,
    interner: NameInterner,
}
|
|
||||||
|
|
||||||
impl FileManager {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
files: Vec::new(),
|
|
||||||
interner: NameInterner::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_interner(interner: NameInterner) -> Self {
|
|
||||||
Self {
|
|
||||||
files: Vec::new(),
|
|
||||||
interner,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn interner(&self) -> &NameInterner {
|
|
||||||
&self.interner
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn interner_mut(&mut self) -> &mut NameInterner {
|
|
||||||
&mut self.interner
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add(&mut self, path: PathBuf, source: String) -> usize {
|
|
||||||
let id = self.files.len();
|
|
||||||
self.files.push(SourceFile {
|
|
||||||
id,
|
|
||||||
path,
|
|
||||||
source: Arc::from(source),
|
|
||||||
});
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_file(&self, id: usize) -> Option<&SourceFile> {
|
|
||||||
self.files.get(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_path(&self, id: usize) -> Option<PathBuf> {
|
|
||||||
self.files.get(id).map(|f| f.path.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn lookup_pos(&self, file_id: usize, pos: u32) -> (usize, usize) {
|
|
||||||
let file = if let Some(f) = self.files.get(file_id) {
|
|
||||||
f
|
|
||||||
} else {
|
|
||||||
return (0, 0);
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut line = 1;
|
|
||||||
let mut col = 1;
|
|
||||||
for (i, c) in file.source.char_indices() {
|
|
||||||
if i as u32 == pos {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if c == '\n' {
|
|
||||||
line += 1;
|
|
||||||
col = 1;
|
|
||||||
} else {
|
|
||||||
col += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(line, col)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Checks byte-offset -> (line, col) mapping, including newline
    /// boundaries and leading whitespace at the start of a later line.
    /// (Line 3 starts with two spaces, so offset 14 is column 3.)
    #[test]
    fn test_lookup_pos() {
        let mut fm = FileManager::new();
        let source = "line1\nline2\n  line3".to_string();
        let file_id = fm.add(PathBuf::from("test.pbs"), source);

        // "l" in line 1
        assert_eq!(fm.lookup_pos(file_id, 0), (1, 1));
        // "e" in line 1
        assert_eq!(fm.lookup_pos(file_id, 3), (1, 4));
        // "\n" after line 1
        assert_eq!(fm.lookup_pos(file_id, 5), (1, 6));
        // "l" in line 2
        assert_eq!(fm.lookup_pos(file_id, 6), (2, 1));
        // first space in line 3
        assert_eq!(fm.lookup_pos(file_id, 12), (3, 1));
        // "l" in line 3
        assert_eq!(fm.lookup_pos(file_id, 14), (3, 3));
    }
}
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
// Shared infrastructure modules used across the compiler stages.
pub mod diagnostics;
pub mod spans;
pub mod files;
pub mod symbols;
pub mod config;
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
// Canonical Span for the whole workspace.
// Re-exported here so downstream code depends on a single span/file-id
// definition rather than importing prometeu_analysis directly.
pub use prometeu_analysis::span::Span;
pub use prometeu_analysis::ids::FileId;
|
|
||||||
@ -1,245 +0,0 @@
|
|||||||
use crate::common::spans::Span;
|
|
||||||
use prometeu_analysis::NameInterner;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// A debug symbol as produced by the backend: a program counter paired with
/// the raw source span it was emitted from (not yet resolved to line/column).
#[derive(Debug, Clone)]
pub struct RawSymbol {
    pub pc: u32,
    pub span: Span,
}

/// A fully resolved debug symbol: `pc` mapped to a file plus 1-based
/// line/column (see `CompilationUnit::export`, which builds these).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DebugSymbol {
    pub pc: u32,
    pub file: String,
    pub line: usize,
    pub col: usize,
}

/// A named declaration exposed to tooling via symbols.json.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Symbol {
    /// Stable id of the form "{project}:{kind}:{module}:{name}:{span-hash}"
    /// (built in `convert_symbol`).
    pub id: String,
    pub name: String,
    /// One of "service" | "type" | "function" (see `convert_symbol`).
    pub kind: String,
    pub exported: bool,
    pub module_path: String,
    pub decl_span: SpanRange,
}

/// A source range expressed as a file URI plus start/end positions.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SpanRange {
    pub file_uri: String,
    pub start: Pos,
    pub end: Pos,
}

/// A 0-based line/column position (converted from the 1-based values
/// returned by `FileManager::lookup_pos`).
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Pos {
    pub line: u32,
    pub col: u32,
}

/// All symbols belonging to a single project in the workspace.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ProjectSymbols {
    pub project: String,
    pub project_dir: String,
    pub symbols: Vec<Symbol>,
}

/// Top-level layout of the emitted symbols.json file.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SymbolsFile {
    pub schema_version: u32,
    pub compiler_version: String,
    pub root_project: String,
    pub projects: Vec<ProjectSymbols>,
}

// Alias kept for readability at call sites.
pub type SymbolInfo = Symbol;
|
|
||||||
|
|
||||||
// ========================
// analysis.json v0 structs
// ========================

/// Maps a numeric file id to its URI in analysis.json.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AnalysisFileTableEntry {
    pub file_id: u32,
    pub uri: String,
}

/// Maps an interned name id to its string form.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AnalysisNameEntry {
    pub name_id: u32,
    pub name: String,
}

/// Maps a module id to its module path.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AnalysisModuleEntry {
    pub module_id: u32,
    pub module_path: String,
}

/// One symbol row; references the name and module tables by id.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AnalysisSymbolEntry {
    pub symbol_id: u32,
    pub name_id: u32,
    pub kind: String,
    pub exported: bool,
    pub module_id: u32,
    pub decl_span: SpanRange,
}

/// Fact buckets; untyped (`serde_json::Value`) in schema v0.
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct AnalysisFacts {
    pub node_type: Vec<serde_json::Value>,
    pub symbol_type: Vec<serde_json::Value>,
}

/// Top-level layout of analysis.json, schema version 0.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AnalysisFileV0 {
    pub schema_version: u32,
    pub compiler_version: String,
    pub root_project: String,
    pub file_table: Vec<AnalysisFileTableEntry>,
    pub name_table: Vec<AnalysisNameEntry>,
    pub module_table: Vec<AnalysisModuleEntry>,
    pub symbols: Vec<AnalysisSymbolEntry>,
    // refs and types are untyped placeholders in v0.
    pub refs: Vec<serde_json::Value>,
    pub types: Vec<serde_json::Value>,
    pub facts: AnalysisFacts,
    pub diagnostics: Vec<serde_json::Value>,
}
|
|
||||||
|
|
||||||
pub fn collect_symbols(
|
|
||||||
project_id: &str,
|
|
||||||
module_symbols: &HashMap<String, crate::frontends::pbs::symbols::ModuleSymbols>,
|
|
||||||
file_manager: &crate::common::files::FileManager,
|
|
||||||
interner: &NameInterner,
|
|
||||||
) -> Vec<Symbol> {
|
|
||||||
let mut result = Vec::new();
|
|
||||||
|
|
||||||
for (module_path, ms) in module_symbols {
|
|
||||||
// Collect from type_symbols
|
|
||||||
for list in ms.type_symbols.symbols.values() {
|
|
||||||
for sym in list {
|
|
||||||
if let Some(s) = convert_symbol(project_id, module_path, sym, file_manager, interner) {
|
|
||||||
result.push(s);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Collect from value_symbols
|
|
||||||
for list in ms.value_symbols.symbols.values() {
|
|
||||||
for sym in list {
|
|
||||||
if let Some(s) = convert_symbol(project_id, module_path, sym, file_manager, interner) {
|
|
||||||
result.push(s);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Deterministic ordering: by file, then start pos, then name
|
|
||||||
result.sort_by(|a, b| {
|
|
||||||
a.decl_span.file_uri.cmp(&b.decl_span.file_uri)
|
|
||||||
.then(a.decl_span.start.line.cmp(&b.decl_span.start.line))
|
|
||||||
.then(a.decl_span.start.col.cmp(&b.decl_span.start.col))
|
|
||||||
.then(a.name.cmp(&b.name))
|
|
||||||
});
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
fn convert_symbol(
|
|
||||||
project_id: &str,
|
|
||||||
module_path: &str,
|
|
||||||
sym: &crate::frontends::pbs::symbols::Symbol,
|
|
||||||
file_manager: &crate::common::files::FileManager,
|
|
||||||
interner: &NameInterner,
|
|
||||||
) -> Option<Symbol> {
|
|
||||||
use crate::frontends::pbs::symbols::{SymbolKind, Visibility};
|
|
||||||
|
|
||||||
let kind = match sym.kind {
|
|
||||||
SymbolKind::Service => "service",
|
|
||||||
SymbolKind::Struct | SymbolKind::Contract | SymbolKind::ErrorType => "type",
|
|
||||||
SymbolKind::Function => "function",
|
|
||||||
SymbolKind::Local => return None, // Ignore locals for v0
|
|
||||||
};
|
|
||||||
|
|
||||||
let exported = sym.visibility == Visibility::Pub;
|
|
||||||
|
|
||||||
// According to v0 policy, only service and declare are exported.
|
|
||||||
// Functions are NOT exportable yet.
|
|
||||||
if exported && sym.kind == SymbolKind::Function {
|
|
||||||
// This should have been caught by semantic analysis, but we enforce it here too
|
|
||||||
// for the symbols.json output.
|
|
||||||
// Actually, we'll just mark it exported=false if it's a function.
|
|
||||||
}
|
|
||||||
|
|
||||||
let span = sym.span.clone();
|
|
||||||
let file_path = file_manager.get_path(span.file.as_usize())
|
|
||||||
.map(|p| p.to_string_lossy().to_string())
|
|
||||||
.unwrap_or_else(|| format!("unknown_file_{}", span.file.as_usize()));
|
|
||||||
|
|
||||||
// Convert 1-based to 0-based
|
|
||||||
let (s_line, s_col) = file_manager.lookup_pos(span.file.as_usize(), span.start);
|
|
||||||
let (e_line, e_col) = file_manager.lookup_pos(span.file.as_usize(), span.end);
|
|
||||||
|
|
||||||
let decl_span = SpanRange {
|
|
||||||
file_uri: file_path,
|
|
||||||
start: Pos { line: (s_line - 1) as u32, col: (s_col - 1) as u32 },
|
|
||||||
end: Pos { line: (e_line - 1) as u32, col: (e_col - 1) as u32 },
|
|
||||||
};
|
|
||||||
|
|
||||||
let hash = decl_span.compute_hash();
|
|
||||||
let name = interner.resolve(sym.name).to_string();
|
|
||||||
let id = format!("{}:{}:{}:{}:{:016x}", project_id, kind, module_path, name, hash);
|
|
||||||
|
|
||||||
Some(Symbol {
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
kind: kind.to_string(),
|
|
||||||
exported,
|
|
||||||
module_path: module_path.to_string(),
|
|
||||||
decl_span,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SpanRange {
|
|
||||||
pub fn compute_hash(&self) -> u64 {
|
|
||||||
let mut h = 0xcbf29ce484222325u64;
|
|
||||||
let mut update = |bytes: &[u8]| {
|
|
||||||
for b in bytes {
|
|
||||||
h ^= *b as u64;
|
|
||||||
h = h.wrapping_mul(0x100000001b3u64);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
update(self.file_uri.as_bytes());
|
|
||||||
update(&self.start.line.to_le_bytes());
|
|
||||||
update(&self.start.col.to_le_bytes());
|
|
||||||
update(&self.end.line.to_le_bytes());
|
|
||||||
update(&self.end.col.to_le_bytes());
|
|
||||||
h
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    /// `compute_hash` must be deterministic so symbol ids are stable
    /// between compiler runs.
    #[test]
    fn test_symbol_id_is_stable() {
        let span = SpanRange {
            file_uri: "main.pbs".to_string(),
            start: Pos { line: 10, col: 5 },
            end: Pos { line: 10, col: 20 },
        };

        let hash1 = span.compute_hash();
        let hash2 = span.compute_hash();

        assert_eq!(hash1, hash2);
        // Hash constant may change if algorithm or field names change; we only
        // assert determinism here.
        assert!(hash1 != 0);
    }
}
|
|
||||||
@ -1,521 +0,0 @@
|
|||||||
//! # Compiler Orchestration
|
|
||||||
//!
|
|
||||||
//! This module provides the high-level API for triggering the compilation process.
|
|
||||||
//! It handles the transition between different compiler phases: Frontend -> IR -> Backend.
|
|
||||||
|
|
||||||
use crate::backend;
|
|
||||||
use crate::common::config::ProjectConfig;
|
|
||||||
use crate::common::files::FileManager;
|
|
||||||
use crate::common::spans::{FileId, Span};
|
|
||||||
use crate::common::symbols::{DebugSymbol, ProjectSymbols, RawSymbol, SymbolsFile};
|
|
||||||
use anyhow::Result;
|
|
||||||
use prometeu_bytecode::BytecodeModule;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
/// The result of a successful compilation process.
/// It contains the final binary and the metadata needed for debugging.
#[derive(Debug)]
pub struct CompilationUnit {
    /// The raw binary data formatted as Prometeu ByteCode (PBC).
    /// This is what gets written to a `.pbc` file.
    pub rom: Vec<u8>,

    /// The list of debug symbols discovered during compilation.
    /// These are used to map bytecode offsets back to source code locations.
    /// Spans here are still raw; they are resolved to file/line/column by
    /// [`CompilationUnit::export`].
    pub raw_symbols: Vec<RawSymbol>,

    /// The file manager containing all source files used during compilation.
    pub file_manager: FileManager,

    /// The high-level project symbols for LSP and other tools.
    pub project_symbols: Vec<ProjectSymbols>,

    /// The name of the root project.
    pub root_project: String,
}
|
|
||||||
|
|
||||||
impl CompilationUnit {
|
|
||||||
/// Writes the compilation results (PBC binary, disassembly, and symbols) to the disk.
|
|
||||||
///
|
|
||||||
/// # Arguments
|
|
||||||
/// * `out` - The base path for the output `.pbc` file.
|
|
||||||
/// * `emit_disasm` - If true, a `.disasm` file will be created next to the output.
|
|
||||||
/// * `emit_symbols` - If true, a `.json` symbols file will be created next to the output.
|
|
||||||
pub fn export(&self, out: &Path, emit_disasm: bool, emit_symbols: bool) -> Result<()> {
|
|
||||||
let mut debug_symbols = Vec::new();
|
|
||||||
for raw in &self.raw_symbols {
|
|
||||||
let path = self.file_manager.get_path(raw.span.file.as_usize())
|
|
||||||
.map(|p| p.to_string_lossy().to_string())
|
|
||||||
.unwrap_or_else(|| format!("file_{}", raw.span.file.as_usize()));
|
|
||||||
|
|
||||||
let (line, col) = self.file_manager.lookup_pos(raw.span.file.as_usize(), raw.span.start);
|
|
||||||
|
|
||||||
debug_symbols.push(DebugSymbol {
|
|
||||||
pc: raw.pc,
|
|
||||||
file: path,
|
|
||||||
line,
|
|
||||||
col,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
let lsp_symbols = SymbolsFile {
|
|
||||||
schema_version: 1,
|
|
||||||
compiler_version: "0.1.0".to_string(), // TODO: use crate version
|
|
||||||
root_project: self.root_project.clone(),
|
|
||||||
projects: self.project_symbols.clone(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let artifacts = backend::artifacts::Artifacts::new(
|
|
||||||
self.rom.clone(),
|
|
||||||
debug_symbols,
|
|
||||||
lsp_symbols,
|
|
||||||
);
|
|
||||||
artifacts.export(out, emit_disasm, emit_symbols)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/// Compiles the project at `project_dir` with default options
/// (no dependency-resolution trace output). See [`compile_ext`].
pub fn compile(project_dir: &Path) -> Result<CompilationUnit> {
    compile_ext(project_dir, false)
}
|
|
||||||
|
|
||||||
pub fn compile_ext(project_dir: &Path, explain_deps: bool) -> Result<CompilationUnit> {
|
|
||||||
let config = ProjectConfig::load(project_dir)?;
|
|
||||||
|
|
||||||
if config.script_fe == "pbs" {
|
|
||||||
let graph_res = crate::deps::resolver::resolve_graph(project_dir);
|
|
||||||
|
|
||||||
if explain_deps || graph_res.is_err() {
|
|
||||||
match &graph_res {
|
|
||||||
Ok(graph) => {
|
|
||||||
println!("{}", graph.explain());
|
|
||||||
}
|
|
||||||
Err(crate::deps::resolver::ResolveError::WithTrace { trace, source }) => {
|
|
||||||
// Create a dummy graph to use its explain logic for the trace
|
|
||||||
let mut dummy_graph = crate::deps::resolver::ResolvedGraph::default();
|
|
||||||
dummy_graph.trace = trace.clone();
|
|
||||||
println!("{}", dummy_graph.explain());
|
|
||||||
eprintln!("Dependency resolution failed: {}", source);
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("Dependency resolution failed: {}", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let graph = graph_res.map_err(|e| anyhow::anyhow!("Dependency resolution failed: {}", e))?;
|
|
||||||
|
|
||||||
// Use PBS Frontend adapter implementing the canonical FE contract
|
|
||||||
let fe = crate::frontends::pbs::adapter::PbsFrontendAdapter;
|
|
||||||
let build_result = crate::building::orchestrator::build_from_graph(&graph, crate::building::plan::BuildTarget::Main, &fe)
|
|
||||||
.map_err(|e| anyhow::anyhow!("Build failed: {}", e))?;
|
|
||||||
|
|
||||||
let module = BytecodeModule::from(build_result.image.clone());
|
|
||||||
let rom = module.serialize();
|
|
||||||
|
|
||||||
let mut raw_symbols = Vec::new();
|
|
||||||
if let Some(debug) = &build_result.image.debug_info {
|
|
||||||
for (pc, span) in &debug.pc_to_span {
|
|
||||||
raw_symbols.push(RawSymbol {
|
|
||||||
pc: *pc,
|
|
||||||
span: Span::new(FileId(span.file_id), span.start, span.end),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(CompilationUnit {
|
|
||||||
rom,
|
|
||||||
raw_symbols,
|
|
||||||
file_manager: build_result.file_manager,
|
|
||||||
project_symbols: build_result.symbols,
|
|
||||||
root_project: config.manifest.name.clone(),
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
anyhow::bail!("Invalid frontend: {}", config.script_fe)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use crate::ir_lang;
|
|
||||||
use prometeu_bytecode::disasm::disasm;
|
|
||||||
use prometeu_bytecode::opcode::OpCode;
|
|
||||||
use prometeu_bytecode::BytecodeLoader;
|
|
||||||
use std::fs;
|
|
||||||
use tempfile::tempdir;
|
|
||||||
|
|
||||||
/// A manifest whose `script_fe` is not "pbs" must be rejected with a
/// descriptive error naming the offending frontend.
#[test]
fn test_invalid_frontend() {
    let dir = tempdir().unwrap();
    let config_path = dir.path().join("prometeu.json");
    fs::write(
        config_path,
        r#"{
"name": "invalid_fe",
"version": "0.1.0",
"script_fe": "invalid",
"entry": "main.pbs"
}"#,
    )
    .unwrap();

    let result = compile(dir.path());
    assert!(result.is_err());
    assert!(result.unwrap_err().to_string().contains("Invalid frontend: invalid"));
}
|
|
||||||
|
|
||||||
/// End-to-end: a small program using alloc/mutate compiles to bytecode that
/// contains the expected HIP gate opcodes.
#[test]
fn test_compile_hip_program() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path();

    fs::write(
        project_dir.join("prometeu.json"),
        r#"{
"name": "hip_test",
"version": "0.1.0",
"script_fe": "pbs",
"entry": "src/main/modules/main.pbs"
}"#,
    ).unwrap();

    let code = "
fn frame(): void {
let x = alloc int;
mutate x as v {
let y = v + 1;
}
}
";
    fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
    fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();

    let unit = compile(project_dir).expect("Failed to compile");
    let pbc = BytecodeLoader::load(&unit.rom).expect("Failed to parse PBC");
    let instrs = disasm(&pbc.code).expect("Failed to disassemble");

    let opcodes: Vec<_> = instrs.iter().map(|i| i.opcode).collect();

    assert!(opcodes.contains(&OpCode::Alloc));
    assert!(opcodes.contains(&OpCode::GateLoad));
    // After PR-09, BeginMutate/EndMutate map to their respective opcodes
    assert!(opcodes.contains(&OpCode::GateBeginMutate));
    assert!(opcodes.contains(&OpCode::GateEndMutate));
    assert!(opcodes.contains(&OpCode::Add));
    assert!(opcodes.contains(&OpCode::Ret));
}
|
|
||||||
|
|
||||||
/// Golden test: the disassembly of a representative program must start with
/// a byte-stable snapshot, guarding against accidental codegen changes.
#[test]
fn test_golden_bytecode_snapshot() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path();

    fs::write(
        project_dir.join("prometeu.json"),
        r#"{
"name": "golden_test",
"version": "0.1.0",
"script_fe": "pbs",
"entry": "src/main/modules/main.pbs"
}"#,
    ).unwrap();

    let code = r#"
declare contract Gfx host {}

fn helper(val: int): int {
return val * 2;
}

fn main() {
Gfx.clear(0);
let x = 10;
if (x > 5) {
let y = helper(x);
}

let buf = alloc int;
mutate buf as b {
let current = b + 1;
}
}

// Entry point required by the compiler
fn frame(): void { return; }
"#;
    fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
    fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();

    let unit = compile(project_dir).expect("Failed to compile");
    let pbc = BytecodeLoader::load(&unit.rom).expect("Failed to parse PBC");
    let instrs = disasm(&pbc.code).expect("Failed to disassemble");

    // Render each instruction as "PC OPCODE [operands]" to compare against
    // the golden snapshot below.
    let mut disasm_text = String::new();
    for instr in instrs {
        let operands_str = instr.operands.iter()
            .map(|o| format!("{:?}", o))
            .collect::<Vec<_>>()
            .join(" ");
        let line = if operands_str.is_empty() {
            format!("{:04X} {:?}\n", instr.pc, instr.opcode)
        } else {
            format!("{:04X} {:?} {}\n", instr.pc, instr.opcode, operands_str.trim())
        };
        disasm_text.push_str(&line);
    }

    let expected_disasm = r#"0000 GetLocal U32(0)
0006 PushConst U32(1)
000C Mul
000E Ret
0010 PushConst U32(2)
0016 Syscall U32(4112)
001C PushConst U32(3)
0022 SetLocal U32(0)
0028 GetLocal U32(0)
002E PushConst U32(4)
0034 Gt
0036 JmpIfFalse U32(74)
003C Jmp U32(50)
0042 GetLocal U32(0)
0048 Call U32(0)
004E SetLocal U32(1)
0054 Jmp U32(80)
005A Jmp U32(80)
0060 Alloc U32(2) U32(1)
006A SetLocal U32(1)
0070 GetLocal U32(1)
0076 GateRetain
0078 SetLocal U32(2)
007E GetLocal U32(2)
0084 GateRetain
0086 GateBeginMutate
0088 GetLocal U32(2)
008E GateRetain
0090 GateLoad U32(0)
0096 SetLocal U32(3)
009C GetLocal U32(3)
00A2 PushConst U32(5)
00A8 Add
00AA SetLocal U32(4)
00B0 GateEndMutate
00B2 GateRelease
00B4 GetLocal U32(1)
00BA GateRelease
00BC GetLocal U32(2)
00C2 GateRelease
00C4 Ret
"#;

    // The code may contain additional functions (e.g. frame). We check that the disasm
    // starts with the stable snapshot and, additionally, that the entry point contains FRAME_SYNC before RET.
    assert!(disasm_text.starts_with(expected_disasm), "Golden disassembly prefix mismatch. Got:\n{}", disasm_text);
    assert!(disasm_text.contains("FrameSync\n") || disasm_text.contains("FrameSync"), "Expected to find FrameSync for entry point. Got:\n{}", disasm_text);
}
|
|
||||||
|
|
||||||
/// Conformance test: builds a Core IR fixture by hand, lowers it to VM IR,
/// emits bytecode, and asserts the HIP gate opcodes appear in the ROM.
#[test]
fn test_hip_conformance_v0() {
    use crate::ir_core::*;
    use crate::lowering::lower_program;
    use crate::backend;
    use std::collections::HashMap;

    // --- 1. SETUP CORE IR FIXTURE ---
    let mut const_pool = ConstPool::new();
    let val_42 = const_pool.add_int(42);

    let mut field_offsets = HashMap::new();
    let f1 = FieldId(0);
    field_offsets.insert(f1, 0);

    let mut local_types = HashMap::new();
    local_types.insert(0, Type::Struct("Storage".to_string())); // slot 0: gate handle
    local_types.insert(1, Type::Int); // slot 1: value 42
    local_types.insert(2, Type::Int); // slot 2: result of peek

    let program = Program {
        const_pool,
        modules: vec![Module {
            name: "conformance".to_string(),
            functions: vec![Function {
                id: FunctionId(1),
                name: "main".to_string(),
                sig: {
                    let mut i = global_signature_interner().lock().unwrap();
                    i.intern(Signature { params: vec![], return_type: Type::Void })
                },
                param_slots: 0,
                local_slots: 0,
                return_slots: 0,
                params: vec![],
                return_type: Type::Void,
                blocks: vec![Block {
                    id: 0,
                    instrs: vec![
                        // 1. allocates a storage struct
                        Instr::from(InstrKind::Alloc { ty: TypeId(1), slots: 2 }),
                        Instr::from(InstrKind::SetLocal(0)),

                        // 2. mutates a field (offset 0)
                        Instr::from(InstrKind::BeginMutate { gate: ValueId(0) }),
                        Instr::from(InstrKind::PushConst(val_42)),
                        Instr::from(InstrKind::SetLocal(1)),
                        Instr::from(InstrKind::GateStoreField { gate: ValueId(0), field: f1, value: ValueId(1) }),
                        Instr::from(InstrKind::EndMutate),

                        // 3. peeks value (offset 0)
                        Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
                        Instr::from(InstrKind::GateLoadField { gate: ValueId(0), field: f1 }),
                        Instr::from(InstrKind::SetLocal(2)),
                        Instr::from(InstrKind::EndPeek),
                    ],
                    terminator: Terminator::Return,
                }],
                local_types,
            }],
        }],
        field_offsets,
        field_types: HashMap::new(),
    };

    // --- 2. LOWER TO VM IR ---
    let vm_module = lower_program(&program).expect("Lowering failed");

    let func = &vm_module.functions[0];
    let kinds: Vec<_> = func.body.iter().map(|i| &i.kind).collect();

    // Expected sequence of significant instructions:
    // Alloc, LocalStore(0), GateBeginMutate, PushConst, LocalStore(1), LocalLoad(0), LocalLoad(1), GateStore(0), GateEndMutate...

    assert!(kinds.iter().any(|k| matches!(k, ir_lang::InstrKind::Alloc { .. })), "Must contain Alloc");
    assert!(kinds.iter().any(|k| matches!(k, ir_lang::InstrKind::GateBeginMutate)), "Must contain GateBeginMutate");
    assert!(kinds.iter().any(|k| matches!(k, ir_lang::InstrKind::GateStore { offset: 0 })), "Must contain GateStore(0)");
    assert!(kinds.iter().any(|k| matches!(k, ir_lang::InstrKind::GateBeginPeek)), "Must contain GateBeginPeek");
    assert!(kinds.iter().any(|k| matches!(k, ir_lang::InstrKind::GateLoad { offset: 0 })), "Must contain GateLoad(0)");

    // RC assertions:
    assert!(kinds.contains(&&ir_lang::InstrKind::GateRetain), "Must contain GateRetain (on LocalLoad of gate)");
    assert!(kinds.contains(&&ir_lang::InstrKind::GateRelease), "Must contain GateRelease (on cleanup or Pop)");

    // --- 4. EMIT BYTECODE ---
    let emit_result = backend::emit_module(&vm_module).expect("Emission failed");

    let rom = emit_result.rom;

    // --- 5. ASSERT INDUSTRIAL FORMAT ---
    use prometeu_bytecode::BytecodeLoader;
    let pbc = BytecodeLoader::load(&rom).expect("Failed to parse industrial PBC");

    assert_eq!(&rom[0..4], b"PBS\0");
    assert_eq!(pbc.const_pool.len(), 2); // Null, 42

    // ROM Data contains HIP opcodes:
    let code = pbc.code;
    assert!(code.iter().any(|&b| b == 0x60), "Bytecode must contain Alloc (0x60)");
    assert!(code.iter().any(|&b| b == 0x67), "Bytecode must contain GateBeginMutate (0x67)");
    assert!(code.iter().any(|&b| b == 0x62), "Bytecode must contain GateStore (0x62)");
    assert!(code.iter().any(|&b| b == 0x63), "Bytecode must contain GateBeginPeek (0x63)");
    assert!(code.iter().any(|&b| b == 0x61), "Bytecode must contain GateLoad (0x61)");
    assert!(code.iter().any(|&b| b == 0x69), "Bytecode must contain GateRetain (0x69)");
    assert!(code.iter().any(|&b| b == 0x6A), "Bytecode must contain GateRelease (0x6A)");
}
|
|
||||||
|
|
||||||
/// A minimal valid project (manifest + entry file with `frame(): void`)
/// must compile successfully, proving root/entry path resolution works.
#[test]
fn test_project_root_and_entry_resolution() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path();

    // Create prometeu.json
    fs::write(
        project_dir.join("prometeu.json"),
        r#"{
"name": "resolution_test",
"version": "0.1.0",
"script_fe": "pbs",
"entry": "src/main/modules/main.pbs"
}"#,
    ).unwrap();

    // Create src directory and main.pbs (must contain entry point frame(): void)
    fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
    fs::write(
        project_dir.join("src/main/modules/main.pbs"),
        "fn frame(): void { return; }",
    )
    .unwrap();

    // Call compile
    let result = compile(project_dir);

    assert!(result.is_ok(), "Failed to compile: {:?}", result.err());
}
|
|
||||||
|
|
||||||
// #[test]
|
|
||||||
// fn test_symbols_emission_integration() {
|
|
||||||
// let dir = tempdir().unwrap();
|
|
||||||
// let project_dir = dir.path();
|
|
||||||
//
|
|
||||||
// fs::write(
|
|
||||||
// project_dir.join("prometeu.json"),
|
|
||||||
// r#"{
|
|
||||||
// "name": "symbols_test",
|
|
||||||
// "version": "0.1.0",
|
|
||||||
// "script_fe": "pbs",
|
|
||||||
// "entry": "src/main/modules/main.pbs"
|
|
||||||
// }"#,
|
|
||||||
// ).unwrap();
|
|
||||||
//
|
|
||||||
// let code = r#"
|
|
||||||
// fn frame(): void {
|
|
||||||
// let x = 10;
|
|
||||||
// }
|
|
||||||
// "#;
|
|
||||||
// fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
|
||||||
// fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();
|
|
||||||
//
|
|
||||||
// let unit = compile(project_dir).expect("Failed to compile");
|
|
||||||
// let out_pbc = project_dir.join("../../../../build/program.pbc");
|
|
||||||
// fs::create_dir_all(out_pbc.parent().unwrap()).unwrap();
|
|
||||||
//
|
|
||||||
// unit.export(&out_pbc, false, true).expect("Failed to export");
|
|
||||||
//
|
|
||||||
// let symbols_path = project_dir.join("../../../../build/symbols.json");
|
|
||||||
// assert!(symbols_path.exists(), "symbols.json should exist at {:?}", symbols_path);
|
|
||||||
//
|
|
||||||
// let symbols_content = fs::read_to_string(symbols_path).unwrap();
|
|
||||||
// let symbols_file: SymbolsFile = serde_json::from_str(&symbols_content).unwrap();
|
|
||||||
//
|
|
||||||
// assert_eq!(symbols_file.schema_version, 1);
|
|
||||||
// assert!(!symbols_file.projects.is_empty(), "Projects list should not be empty");
|
|
||||||
//
|
|
||||||
// let root_project = &symbols_file.projects[0];
|
|
||||||
// assert!(!root_project.symbols.is_empty(), "Symbols list should not be empty");
|
|
||||||
//
|
|
||||||
// // Check for a symbol (v0 schema uses 0-based lines)
|
|
||||||
// let main_sym = root_project.symbols.iter().find(|s| s.name == "frame");
|
|
||||||
// assert!(main_sym.is_some(), "Should find 'frame' symbol");
|
|
||||||
//
|
|
||||||
// let sym = main_sym.unwrap();
|
|
||||||
// assert!(sym.decl_span.file_uri.contains("main.pbs"), "Symbol file should point to main.pbs, got {}", sym.decl_span.file_uri);
|
|
||||||
//
|
|
||||||
// // Check analysis.json exists and has the basic structure
|
|
||||||
// let analysis_path = project_dir.join("build/analysis.json");
|
|
||||||
// assert!(analysis_path.exists(), "analysis.json should exist at {:?}", analysis_path);
|
|
||||||
// let analysis_content = fs::read_to_string(analysis_path).unwrap();
|
|
||||||
// #[derive(serde::Deserialize)]
|
|
||||||
// struct MinimalAnalysisV0 {
|
|
||||||
// schema_version: u32,
|
|
||||||
// file_table: Vec<serde_json::Value>,
|
|
||||||
// name_table: Vec<serde_json::Value>,
|
|
||||||
// module_table: Vec<serde_json::Value>,
|
|
||||||
// symbols: Vec<serde_json::Value>,
|
|
||||||
// }
|
|
||||||
// let analysis: MinimalAnalysisV0 = serde_json::from_str(&analysis_content).unwrap();
|
|
||||||
// assert_eq!(analysis.schema_version, 0);
|
|
||||||
// assert!(!analysis.file_table.is_empty());
|
|
||||||
// assert!(!analysis.name_table.is_empty());
|
|
||||||
// assert!(!analysis.module_table.is_empty());
|
|
||||||
// assert!(!analysis.symbols.is_empty());
|
|
||||||
// }
|
|
||||||
}
|
|
||||||
@ -1,61 +0,0 @@
|
|||||||
use anyhow::Result;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
/// On-disk index of cached dependencies, persisted as `cache.json`
/// under the cache directory (see `load`/`save`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheManifest {
    // Cached git checkouts; presumably keyed by repository URL — confirm
    // against the fetcher that populates this map.
    // `serde(default)` keeps manifests without this field readable.
    #[serde(default)]
    pub git: HashMap<String, GitCacheEntry>,
}
|
|
||||||
|
|
||||||
/// Metadata for one cached git dependency.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitCacheEntry {
    // Local filesystem path of the cached checkout.
    pub path: PathBuf,
    // The concrete ref/commit the requested ref resolved to.
    pub resolved_ref: String,
    // Timestamp string recorded at fetch time — format set by the fetcher.
    pub fetched_at: String,
}
|
|
||||||
|
|
||||||
impl CacheManifest {
|
|
||||||
pub fn load(cache_dir: &Path) -> Result<Self> {
|
|
||||||
let manifest_path = cache_dir.join("cache.json");
|
|
||||||
if !manifest_path.exists() {
|
|
||||||
return Ok(Self {
|
|
||||||
git: HashMap::new(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
let content = fs::read_to_string(&manifest_path)?;
|
|
||||||
let manifest = serde_json::from_str(&content)?;
|
|
||||||
Ok(manifest)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn save(&self, cache_dir: &Path) -> Result<()> {
|
|
||||||
if !cache_dir.exists() {
|
|
||||||
fs::create_dir_all(cache_dir)?;
|
|
||||||
}
|
|
||||||
let manifest_path = cache_dir.join("cache.json");
|
|
||||||
let content = serde_json::to_string_pretty(self)?;
|
|
||||||
fs::write(manifest_path, content)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the dependency cache root: `<project_root>/cache`.
pub fn get_cache_root(project_root: &Path) -> PathBuf {
    let mut root = project_root.to_path_buf();
    root.push("cache");
    root
}
|
|
||||||
|
|
||||||
/// Returns the checkout directory for `repo_url` inside the project cache:
/// `<project_root>/cache/git/<url-hash>/worktree`.
pub fn get_git_worktree_path(project_root: &Path, repo_url: &str) -> PathBuf {
    get_cache_root(project_root)
        .join("git")
        .join(normalized_repo_id(repo_url))
        .join("worktree")
}
|
|
||||||
|
|
||||||
/// Derives a stable, filesystem-safe directory name for a repository URL:
/// the 64-bit FNV-1a hash of the URL, rendered as 16 lowercase hex digits.
fn normalized_repo_id(url: &str) -> String {
    const FNV_OFFSET: u64 = 0xcbf29ce484222325;
    const FNV_PRIME: u64 = 0x100000001b3;
    let digest = url
        .bytes()
        .fold(FNV_OFFSET, |acc, b| (acc ^ b as u64).wrapping_mul(FNV_PRIME));
    format!("{:016x}", digest)
}
|
|
||||||
@ -1,192 +0,0 @@
|
|||||||
use crate::deps::cache::{get_cache_root, get_git_worktree_path, CacheManifest, GitCacheEntry};
|
|
||||||
use crate::manifest::DependencySpec;
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::process::Command;
|
|
||||||
|
|
||||||
/// Errors that can occur while fetching a dependency (path or git).
#[derive(Debug)]
pub enum FetchError {
    /// Underlying filesystem or process I/O failure.
    Io(std::io::Error),
    /// `git clone` (or checkout) exited with a non-zero status.
    CloneFailed { url: String, stderr: String },
    /// The fetched project has no `prometeu.json` manifest.
    MissingManifest(PathBuf),
    /// A path dependency does not exist, or the spec was unusable.
    InvalidPath(PathBuf),
    /// Reading or writing the cache manifest failed.
    CacheError(String),
}

impl std::fmt::Display for FetchError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Io(err) => write!(f, "IO error: {}", err),
            Self::CloneFailed { url, stderr } => {
                write!(f, "Failed to clone git repository from '{}': {}", url, stderr)
            }
            Self::MissingManifest(p) => {
                write!(f, "Missing 'prometeu.json' in fetched project at {}", p.display())
            }
            Self::InvalidPath(p) => write!(f, "Invalid dependency path: {}", p.display()),
            Self::CacheError(msg) => write!(f, "Cache error: {}", msg),
        }
    }
}

impl std::error::Error for FetchError {}

impl From<std::io::Error> for FetchError {
    fn from(err: std::io::Error) -> Self {
        Self::Io(err)
    }
}
|
|
||||||
|
|
||||||
/// Fetches a dependency based on its specification.
|
|
||||||
pub fn fetch_dependency(
|
|
||||||
alias: &str,
|
|
||||||
spec: &DependencySpec,
|
|
||||||
base_dir: &Path,
|
|
||||||
root_project_dir: &Path,
|
|
||||||
) -> Result<PathBuf, FetchError> {
|
|
||||||
match spec {
|
|
||||||
DependencySpec::Path(p) => fetch_path(p, base_dir),
|
|
||||||
DependencySpec::Full(full) => {
|
|
||||||
if let Some(p) = &full.path {
|
|
||||||
fetch_path(p, base_dir)
|
|
||||||
} else if let Some(url) = &full.git {
|
|
||||||
let version = full.version.as_deref().unwrap_or("latest");
|
|
||||||
fetch_git(url, version, root_project_dir)
|
|
||||||
} else {
|
|
||||||
Err(FetchError::InvalidPath(PathBuf::from(alias)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn fetch_path(path_str: &str, base_dir: &Path) -> Result<PathBuf, FetchError> {
|
|
||||||
let path = base_dir.join(path_str);
|
|
||||||
if !path.exists() {
|
|
||||||
return Err(FetchError::InvalidPath(path));
|
|
||||||
}
|
|
||||||
|
|
||||||
let canonical = path.canonicalize()?;
|
|
||||||
if !canonical.join("prometeu.json").exists() {
|
|
||||||
return Err(FetchError::MissingManifest(canonical));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(canonical)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clones (or reuses a cached clone of) the git repository at `url` and
/// returns the worktree directory containing the dependency project.
///
/// The worktree lives under `<root>/cache/git/<hash>/worktree`. A fresh clone
/// is recorded in the cache manifest (`cache/cache.json`); an existing
/// worktree is reused as-is and is NOT refreshed or re-checked-out.
pub fn fetch_git(url: &str, version: &str, root_project_dir: &Path) -> Result<PathBuf, FetchError> {
    let cache_root = get_cache_root(root_project_dir);
    let mut manifest = CacheManifest::load(&cache_root).map_err(|e| FetchError::CacheError(e.to_string()))?;

    let target_dir = get_git_worktree_path(root_project_dir, url);

    if !target_dir.exists() {
        fs::create_dir_all(&target_dir)?;

        // Clone directly into the freshly created (empty) worktree directory.
        let output = Command::new("git")
            .arg("clone")
            .arg(url)
            .arg(".")
            .current_dir(&target_dir)
            .output()?;

        if !output.status.success() {
            // Cleanup on failure so a later retry starts from a clean slate.
            let _ = fs::remove_dir_all(&target_dir);
            return Err(FetchError::CloneFailed {
                url: url.to_string(),
                stderr: String::from_utf8_lossy(&output.stderr).to_string(),
            });
        }

        // TODO: Handle version/pinning (v0 pins to HEAD for now)
        if version != "latest" {
            // Check out the requested ref/tag/commit in the new clone.
            let output = Command::new("git")
                .arg("checkout")
                .arg(version)
                .current_dir(&target_dir)
                .output()?;

            if !output.status.success() {
                // We keep the clone but maybe should report error?
                // For v0 we just attempt it.
                // NOTE(review): this reuses CloneFailed for a *checkout*
                // failure, so the message will say "clone" — consider a
                // dedicated variant.
                return Err(FetchError::CloneFailed {
                    url: url.to_string(),
                    stderr: String::from_utf8_lossy(&output.stderr).to_string(),
                });
            }
        }

        // Update cache manifest
        // The worktree path is stored relative to the project root so the
        // manifest stays valid if the project directory moves.
        let rel_path = target_dir.strip_prefix(root_project_dir).map_err(|_| FetchError::CacheError("Path outside of project root".to_string()))?;
        manifest.git.insert(url.to_string(), GitCacheEntry {
            path: rel_path.to_path_buf(),
            resolved_ref: version.to_string(),
            fetched_at: "2026-02-02T00:00:00Z".to_string(), // Use a fixed timestamp or actual one? The requirement said "2026-02-02T00:00:00Z" in example
        });
        manifest.save(&cache_root).map_err(|e| FetchError::CacheError(e.to_string()))?;
    }

    // Whether freshly cloned or reused from cache, the worktree must contain
    // a project manifest to count as a valid dependency.
    if !target_dir.join("prometeu.json").exists() {
        return Err(FetchError::MissingManifest(target_dir));
    }

    Ok(target_dir)
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    // A relative path spec must resolve against `base_dir` and come back
    // canonicalized, with the `prometeu.json` presence check passing.
    #[test]
    fn test_fetch_path_resolves_relative() {
        let tmp = tempdir().unwrap();
        let base = tmp.path().join("base");
        let dep = tmp.path().join("dep");
        fs::create_dir_all(&base).unwrap();
        fs::create_dir_all(&dep).unwrap();
        fs::write(dep.join("prometeu.json"), "{}").unwrap();

        let fetched = fetch_path("../dep", &base).unwrap();
        assert_eq!(fetched.canonicalize().unwrap(), dep.canonicalize().unwrap());
    }

    // End-to-end smoke test: clone a local `file://` repository, then verify
    // the worktree exists and the cache manifest records the URL.
    #[test]
    fn test_fetch_git_local_mock() {
        let tmp = tempdir().unwrap();
        let project_root = tmp.path().join("project");
        let remote_dir = tmp.path().join("remote");
        fs::create_dir_all(&project_root).unwrap();
        fs::create_dir_all(&remote_dir).unwrap();

        // Init remote git repo
        // (user.name/email config keeps `git commit` from failing on bare CI images)
        let _ = Command::new("git").arg("init").current_dir(&remote_dir).status();
        let _ = Command::new("git").arg("config").arg("user.email").arg("you@example.com").current_dir(&remote_dir).status();
        let _ = Command::new("git").arg("config").arg("user.name").arg("Your Name").current_dir(&remote_dir).status();

        fs::write(remote_dir.join("prometeu.json"), r#"{"name": "remote", "version": "1.0.0"}"#).unwrap();
        let _ = Command::new("git").arg("add").arg(".").current_dir(&remote_dir).status();
        let _ = Command::new("git").arg("commit").arg("-m").arg("initial").current_dir(&remote_dir).status();

        let url = format!("file://{}", remote_dir.display());
        let fetched = fetch_git(&url, "latest", &project_root);

        // Only assert if git succeeded (it might not be in all CI envs, though should be here)
        if let Ok(path) = fetched {
            assert!(path.exists());
            assert!(path.join("prometeu.json").exists());

            // Check cache manifest
            let cache_json = project_root.join("cache/cache.json");
            assert!(cache_json.exists());
            let content = fs::read_to_string(cache_json).unwrap();
            assert!(content.contains(&url));
        }
    }
}
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
// Dependency-management submodules: graph resolution, fetching of path/git
// dependencies, and the on-disk cache of fetched git checkouts.
pub mod resolver;
pub mod fetch;
pub mod cache;
|
|
||||||
@ -1,750 +0,0 @@
|
|||||||
use crate::analysis::project_registry::ProjectRegistry;
|
|
||||||
use crate::deps::fetch::{fetch_dependency, FetchError};
|
|
||||||
use crate::manifest::{load_manifest, Manifest};
|
|
||||||
use crate::sources::{discover, ProjectSources, SourceError};
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::{HashMap, HashSet};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
// Re-export for external modules/tests that previously imported from here
|
|
||||||
pub use crate::analysis::project_registry::ProjectKey;
|
|
||||||
|
|
||||||
/// A fully-resolved project in the dependency graph: its interned identity,
/// on-disk location, parsed manifest, and discovered source files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResolvedNode {
    // Interned id, assigned by the graph's `ProjectRegistry`.
    pub id: ProjectId,
    // Human-readable identity (name + version).
    pub key: ProjectKey,
    pub path: PathBuf,
    pub manifest: Manifest,
    pub sources: ProjectSources,
}
|
|
||||||
|
|
||||||
/// A dependency edge: the alias the dependent project uses, and the interned
/// id of the project it points to.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResolvedEdge {
    pub alias: String,
    pub to: ProjectId,
}
|
|
||||||
|
|
||||||
/// One event in the resolution log; collected into `ResolutionTrace` and
/// rendered by `ResolvedGraph::explain`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ResolutionStep {
    /// A dependency alias is about to be resolved (`spec` is Debug-formatted).
    TryResolve {
        alias: String,
        spec: String,
    },
    /// A project was resolved for the first time at `path`.
    Resolved {
        project_id: ProjectKey,
        path: PathBuf,
    },
    /// A previously-resolved project was reused.
    UsingCached {
        project_id: ProjectKey,
    },
    /// Two requirements for the same project name disagree on version.
    Conflict {
        name: String,
        existing_version: String,
        new_version: String,
    },
    /// A failure (e.g. fetch) recorded before the error propagates upward.
    Error {
        message: String,
    },
}
|
|
||||||
|
|
||||||
/// Ordered log of everything the resolver did, kept for diagnostics
/// (see `ResolvedGraph::explain` and `ResolveError::WithTrace`).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ResolutionTrace {
    pub steps: Vec<ResolutionStep>,
}
|
|
||||||
|
|
||||||
/// The resolved dependency graph: one node per distinct project, aliased
/// edges between them, and the trace of how resolution proceeded.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ResolvedGraph {
    pub nodes: HashMap<ProjectId, ResolvedNode>,
    // Outgoing dependency edges per project.
    pub edges: HashMap<ProjectId, Vec<ResolvedEdge>>,
    // Set once resolution succeeds; `None` on a default-constructed graph.
    pub root_id: Option<ProjectId>,
    pub trace: ResolutionTrace,
    // Id interner for ProjectKey <-> ProjectId; not serialized.
    #[serde(skip)]
    pub registry: ProjectRegistry,
}
|
|
||||||
|
|
||||||
impl ResolvedGraph {
|
|
||||||
pub fn resolve_import_path(&self, from_node: &ProjectId, import_path: &str) -> Option<PathBuf> {
|
|
||||||
if import_path.starts_with('@') {
|
|
||||||
let parts: Vec<&str> = import_path[1..].splitn(2, ':').collect();
|
|
||||||
if parts.len() == 2 {
|
|
||||||
let alias = parts[0];
|
|
||||||
let module_name = parts[1];
|
|
||||||
|
|
||||||
// Find dependency by alias
|
|
||||||
if let Some(edges) = self.edges.get(from_node) {
|
|
||||||
if let Some(edge) = edges.iter().find(|e| e.alias == alias) {
|
|
||||||
if let Some(node) = self.nodes.get(&edge.to) {
|
|
||||||
// Found the dependency project. Now find the module inside it.
|
|
||||||
let module_path = node.path.join("src/main/modules").join(module_name);
|
|
||||||
return Some(module_path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Local import (relative to current project's src/main/modules)
|
|
||||||
if let Some(node) = self.nodes.get(from_node) {
|
|
||||||
return Some(node.path.join("src/main/modules").join(import_path));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn explain(&self) -> String {
|
|
||||||
let mut out = String::new();
|
|
||||||
out.push_str("--- Dependency Resolution Trace ---\n");
|
|
||||||
for step in &self.trace.steps {
|
|
||||||
match step {
|
|
||||||
ResolutionStep::TryResolve { alias, spec } => {
|
|
||||||
out.push_str(&format!(" [?] Resolving '{}' (spec: {})\n", alias, spec));
|
|
||||||
}
|
|
||||||
ResolutionStep::Resolved { project_id, path } => {
|
|
||||||
out.push_str(&format!(" [✓] Resolved '{}' v{} at {:?}\n", project_id.name, project_id.version, path));
|
|
||||||
}
|
|
||||||
ResolutionStep::UsingCached { project_id } => {
|
|
||||||
out.push_str(&format!(" [.] Using cached '{}' v{}\n", project_id.name, project_id.version));
|
|
||||||
}
|
|
||||||
ResolutionStep::Conflict { name, existing_version, new_version } => {
|
|
||||||
out.push_str(&format!(" [!] CONFLICT for '{}': {} vs {}\n", name, existing_version, new_version));
|
|
||||||
}
|
|
||||||
ResolutionStep::Error { message } => {
|
|
||||||
out.push_str(&format!(" [X] ERROR: {}\n", message));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(root_id) = &self.root_id {
|
|
||||||
out.push_str("\n--- Resolved Dependency Graph ---\n");
|
|
||||||
let mut visited = HashSet::new();
|
|
||||||
if let Some(meta) = self.registry.meta(*root_id) {
|
|
||||||
out.push_str(&format!("{} v{}\n", meta.name, meta.version));
|
|
||||||
}
|
|
||||||
self.print_node(root_id, 0, &mut out, &mut visited);
|
|
||||||
}
|
|
||||||
|
|
||||||
out
|
|
||||||
}
|
|
||||||
|
|
||||||
fn print_node(&self, id: &ProjectId, indent: usize, out: &mut String, visited: &mut HashSet<ProjectId>) {
|
|
||||||
if let Some(edges) = self.edges.get(id) {
|
|
||||||
for edge in edges {
|
|
||||||
let prefix = " ".repeat(indent);
|
|
||||||
if let Some(meta) = self.registry.meta(edge.to) {
|
|
||||||
out.push_str(&format!("{}└── {}: {} v{}\n", prefix, edge.alias, meta.name, meta.version));
|
|
||||||
}
|
|
||||||
if !visited.contains(&edge.to) {
|
|
||||||
visited.insert(edge.to);
|
|
||||||
self.print_node(&edge.to, indent + 1, out, visited);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Errors produced while resolving the dependency graph.
#[derive(Debug)]
pub enum ResolveError {
    /// Dependency cycle; the chain lists project names from the cycle's first
    /// occurrence down to the repeated project.
    CycleDetected(Vec<String>),
    /// A dependency path did not exist.
    MissingDependency(PathBuf),
    /// The same project name was required at two different versions.
    VersionConflict {
        name: String,
        v1: String,
        v2: String,
    },
    /// Two distinct directories both claim the same project name and version.
    NameCollision {
        name: String,
        p1: PathBuf,
        p2: PathBuf,
    },
    /// Failed to load or parse a `prometeu.json` manifest.
    ManifestError(crate::manifest::ManifestError),
    /// Failed to fetch a dependency (path or git).
    FetchError(FetchError),
    /// Failed to discover project sources.
    SourceError(SourceError),
    /// Filesystem error, with the path involved when known.
    IoError {
        path: PathBuf,
        source: std::io::Error,
    },
    /// Top-level wrapper so callers receive the resolution trace alongside
    /// the underlying error (see `resolve_graph`).
    WithTrace {
        trace: ResolutionTrace,
        source: Box<ResolveError>,
    },
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for ResolveError {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
ResolveError::CycleDetected(chain) => write!(f, "Cycle detected: {}", chain.join(" -> ")),
|
|
||||||
ResolveError::MissingDependency(path) => write!(f, "Missing dependency at: {}", path.display()),
|
|
||||||
ResolveError::VersionConflict { name, v1, v2 } => {
|
|
||||||
write!(f, "Version conflict for project '{}': {} vs {}", name, v1, v2)
|
|
||||||
}
|
|
||||||
ResolveError::NameCollision { name, p1, p2 } => {
|
|
||||||
write!(f, "Name collision: two distinct projects claiming same name '{}' at {} and {}", name, p1.display(), p2.display())
|
|
||||||
}
|
|
||||||
ResolveError::ManifestError(e) => write!(f, "Manifest error: {}", e),
|
|
||||||
ResolveError::FetchError(e) => write!(f, "Fetch error: {}", e),
|
|
||||||
ResolveError::SourceError(e) => write!(f, "Source error: {}", e),
|
|
||||||
ResolveError::IoError { path, source } => write!(f, "IO error at {}: {}", path.display(), source),
|
|
||||||
ResolveError::WithTrace { source, .. } => write!(f, "{}", source),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::error::Error for ResolveError {}
|
|
||||||
|
|
||||||
impl From<crate::manifest::ManifestError> for ResolveError {
|
|
||||||
fn from(e: crate::manifest::ManifestError) -> Self {
|
|
||||||
ResolveError::ManifestError(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<FetchError> for ResolveError {
|
|
||||||
fn from(e: FetchError) -> Self {
|
|
||||||
ResolveError::FetchError(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<SourceError> for ResolveError {
|
|
||||||
fn from(e: SourceError) -> Self {
|
|
||||||
match e {
|
|
||||||
SourceError::Manifest(me) => ResolveError::ManifestError(me),
|
|
||||||
SourceError::Io(ioe) => ResolveError::IoError {
|
|
||||||
path: PathBuf::new(),
|
|
||||||
source: ioe,
|
|
||||||
},
|
|
||||||
_ => ResolveError::SourceError(e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve_graph(root_dir: &Path) -> Result<ResolvedGraph, ResolveError> {
|
|
||||||
let mut graph = ResolvedGraph::default();
|
|
||||||
let mut visited: HashSet<ProjectId> = HashSet::new();
|
|
||||||
let mut stack: Vec<ProjectId> = Vec::new();
|
|
||||||
|
|
||||||
let root_path = root_dir.canonicalize().map_err(|e| ResolveError::IoError {
|
|
||||||
path: root_dir.to_path_buf(),
|
|
||||||
source: e,
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let root_id = match resolve_recursive(&root_path, &root_path, &mut graph, &mut visited, &mut stack) {
|
|
||||||
Ok(id) => id,
|
|
||||||
Err(e) => return Err(ResolveError::WithTrace {
|
|
||||||
trace: graph.trace,
|
|
||||||
source: Box::new(e),
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
graph.root_id = Some(root_id);
|
|
||||||
|
|
||||||
Ok(graph)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Depth-first resolution of one project and, recursively, its dependencies.
///
/// Returns the project's interned id. `visited` holds fully-processed
/// projects (for reuse); `stack` holds the current DFS path (for cycle
/// detection). Every action is appended to `graph.trace`.
fn resolve_recursive(
    project_path: &Path,
    root_project_dir: &Path,
    graph: &mut ResolvedGraph,
    visited: &mut HashSet<ProjectId>,
    stack: &mut Vec<ProjectId>,
) -> Result<ProjectId, ResolveError> {
    let manifest = load_manifest(project_path)?;
    let sources = discover(project_path)?;
    let project_key = ProjectKey { name: manifest.name.clone(), version: manifest.version.clone() };
    let project_id = graph.registry.intern(&project_key);

    // Cycle detection
    // If this project is already on the DFS stack, build the name chain from
    // its first occurrence down to (and including) this re-entry.
    if let Some(pos) = stack.iter().position(|id| id == &project_id) {
        let mut chain: Vec<String> = stack[pos..]
            .iter()
            .map(|id| graph.registry.meta(*id).map(|m| m.name.clone()).unwrap_or_else(|| format!("#{}", id.0)))
            .collect();
        chain.push(project_key.name.clone());
        return Err(ResolveError::CycleDetected(chain));
    }

    // Collision handling: Name collision / Version conflict
    for node in graph.nodes.values() {
        if node.key.name == project_key.name {
            // Same name at a different version: hard conflict (recorded in the trace).
            if node.key.version != project_key.version {
                graph.trace.steps.push(ResolutionStep::Conflict {
                    name: project_key.name.clone(),
                    existing_version: node.key.version.clone(),
                    new_version: project_key.version.clone(),
                });
                return Err(ResolveError::VersionConflict {
                    name: project_key.name.clone(),
                    v1: node.key.version.clone(),
                    v2: project_key.version.clone(),
                });
            }
            // Same name AND version but from two distinct directories: collision.
            if node.path != project_path {
                return Err(ResolveError::NameCollision {
                    name: project_key.name.clone(),
                    p1: node.path.clone(),
                    p2: project_path.to_path_buf(),
                });
            }
        }
    }

    // If already fully visited, return the ID
    if visited.contains(&project_id) {
        graph.trace.steps.push(ResolutionStep::UsingCached { project_id: project_key.clone() });
        return Ok(project_id);
    }

    graph.trace.steps.push(ResolutionStep::Resolved { project_id: project_key.clone(), path: project_path.to_path_buf() });

    // Mark as in-progress: `visited` for reuse, `stack` for cycle detection.
    visited.insert(project_id);
    stack.push(project_id);

    // Resolve each declared dependency depth-first, recording trace steps.
    let mut edges = Vec::new();
    for (alias, spec) in &manifest.dependencies {
        graph.trace.steps.push(ResolutionStep::TryResolve { alias: alias.clone(), spec: format!("{:?}", spec) });

        let dep_path = match fetch_dependency(alias, spec, project_path, root_project_dir) {
            Ok(p) => p,
            Err(e) => {
                // Record the fetch failure in the trace before propagating it.
                graph.trace.steps.push(ResolutionStep::Error { message: format!("Fetch error for '{}': {}", alias, e) });
                return Err(e.into());
            }
        };

        let dep_id = match resolve_recursive(&dep_path, root_project_dir, graph, visited, stack) {
            Ok(id) => id,
            Err(e) => return Err(e),
        };

        edges.push(ResolvedEdge { alias: alias.clone(), to: dep_id });
    }

    // Done with this subtree: pop the DFS stack and commit node + edges.
    stack.pop();
    graph.nodes.insert(project_id, ResolvedNode { id: project_id, key: project_key, path: project_path.to_path_buf(), manifest, sources });
    graph.edges.insert(project_id, edges);

    Ok(project_id)
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use std::fs;
|
|
||||||
use tempfile::tempdir;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_simple_graph() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root = dir.path().join("root");
|
|
||||||
let dep = dir.path().join("dep");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&dep).unwrap();
|
|
||||||
|
|
||||||
fs::write(root.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "d": "../dep" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let graph = resolve_graph(&root).unwrap();
|
|
||||||
assert_eq!(graph.nodes.len(), 2);
|
|
||||||
let root_id = graph.root_id.as_ref().unwrap();
|
|
||||||
let root_meta = graph.registry.meta(*root_id).unwrap();
|
|
||||||
assert_eq!(root_meta.name, "root");
|
|
||||||
|
|
||||||
let edges = graph.edges.get(root_id).unwrap();
|
|
||||||
assert_eq!(edges.len(), 1);
|
|
||||||
assert_eq!(edges[0].alias, "d");
|
|
||||||
let dep_meta = graph.registry.meta(edges[0].to).unwrap();
|
|
||||||
assert_eq!(dep_meta.name, "dep");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_cycle_detection() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let a = dir.path().join("a");
|
|
||||||
let b = dir.path().join("b");
|
|
||||||
fs::create_dir_all(&a).unwrap();
|
|
||||||
fs::create_dir_all(&b).unwrap();
|
|
||||||
|
|
||||||
fs::write(a.join("prometeu.json"), r#"{
|
|
||||||
"name": "a",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "b": "../b" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(b.join("prometeu.json"), r#"{
|
|
||||||
"name": "b",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "a": "../a" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let err = resolve_graph(&a).unwrap_err();
|
|
||||||
match err {
|
|
||||||
ResolveError::WithTrace { source, .. } => {
|
|
||||||
if let ResolveError::CycleDetected(chain) = *source {
|
|
||||||
assert_eq!(chain, vec!["a", "b", "a"]);
|
|
||||||
} else {
|
|
||||||
panic!("Expected CycleDetected error, got {:?}", source);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => panic!("Expected WithTrace containing CycleDetected error, got {:?}", err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_alias_does_not_change_identity() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root = dir.path().join("root");
|
|
||||||
let dep = dir.path().join("dep");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&dep).unwrap();
|
|
||||||
|
|
||||||
fs::write(root.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "my_alias": "../dep" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep.join("prometeu.json"), r#"{
|
|
||||||
"name": "actual_name",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let graph = resolve_graph(&root).unwrap();
|
|
||||||
let root_id = graph.root_id.as_ref().unwrap();
|
|
||||||
let edges = graph.edges.get(root_id).unwrap();
|
|
||||||
assert_eq!(edges[0].alias, "my_alias");
|
|
||||||
let dep_meta = graph.registry.meta(edges[0].to).unwrap();
|
|
||||||
assert_eq!(dep_meta.name, "actual_name");
|
|
||||||
assert!(graph.nodes.contains_key(&edges[0].to));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_version_conflict() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root = dir.path().join("root");
|
|
||||||
let dep1 = dir.path().join("dep1");
|
|
||||||
let dep2 = dir.path().join("dep2");
|
|
||||||
let shared = dir.path().join("shared1");
|
|
||||||
let shared2 = dir.path().join("shared2");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&dep1).unwrap();
|
|
||||||
fs::create_dir_all(&dep2).unwrap();
|
|
||||||
fs::create_dir_all(&shared).unwrap();
|
|
||||||
fs::create_dir_all(&shared2).unwrap();
|
|
||||||
|
|
||||||
fs::write(root.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "d1": "../dep1", "d2": "../dep2" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep1.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep1",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "s": "../shared1" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep2.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep2",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "s": "../shared2" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(shared.join("prometeu.json"), r#"{
|
|
||||||
"name": "shared",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(shared2.join("prometeu.json"), r#"{
|
|
||||||
"name": "shared",
|
|
||||||
"version": "2.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let err = resolve_graph(&root).unwrap_err();
|
|
||||||
match err {
|
|
||||||
ResolveError::WithTrace { source, .. } => {
|
|
||||||
if let ResolveError::VersionConflict { name, .. } = *source {
|
|
||||||
assert_eq!(name, "shared");
|
|
||||||
} else {
|
|
||||||
panic!("Expected VersionConflict error, got {:?}", source);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => panic!("Expected WithTrace containing VersionConflict error, got {:?}", err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_name_collision() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root = dir.path().join("root");
|
|
||||||
let dep1 = dir.path().join("dep1");
|
|
||||||
let dep2 = dir.path().join("dep2");
|
|
||||||
let p1 = dir.path().join("p1");
|
|
||||||
let p2 = dir.path().join("p2");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&dep1).unwrap();
|
|
||||||
fs::create_dir_all(&dep2).unwrap();
|
|
||||||
fs::create_dir_all(&p1).unwrap();
|
|
||||||
fs::create_dir_all(&p2).unwrap();
|
|
||||||
|
|
||||||
fs::write(root.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "d1": "../dep1", "d2": "../dep2" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep1.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep1",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "p": "../p1" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(dep2.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep2",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": { "p": "../p2" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
// Both p1 and p2 claim to be "collision" version 1.0.0
|
|
||||||
fs::write(p1.join("prometeu.json"), r#"{
|
|
||||||
"name": "collision",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(p2.join("prometeu.json"), r#"{
|
|
||||||
"name": "collision",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let err = resolve_graph(&root).unwrap_err();
|
|
||||||
match err {
|
|
||||||
ResolveError::WithTrace { source, .. } => {
|
|
||||||
if let ResolveError::NameCollision { name, .. } = *source {
|
|
||||||
assert_eq!(name, "collision");
|
|
||||||
} else {
|
|
||||||
panic!("Expected NameCollision error, got {:?}", source);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => panic!("Expected WithTrace containing NameCollision error, got {:?}", err),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_resolve_with_git_dependency_mock() {
|
|
||||||
let tmp = tempdir().unwrap();
|
|
||||||
let root = tmp.path().join("root");
|
|
||||||
let remote = tmp.path().join("remote");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&remote).unwrap();
|
|
||||||
|
|
||||||
// Setup remote
|
|
||||||
let _ = std::process::Command::new("git").arg("init").current_dir(&remote).status();
|
|
||||||
let _ = std::process::Command::new("git").arg("config").arg("user.email").arg("you@example.com").current_dir(&remote).status();
|
|
||||||
let _ = std::process::Command::new("git").arg("config").arg("user.name").arg("Your Name").current_dir(&remote).status();
|
|
||||||
fs::write(remote.join("prometeu.json"), r#"{"name": "remote", "version": "1.2.3", "kind": "lib"}"#).unwrap();
|
|
||||||
let _ = std::process::Command::new("git").arg("add").arg(".").current_dir(&remote).status();
|
|
||||||
let _ = std::process::Command::new("git").arg("commit").arg("-m").arg("init").current_dir(&remote).status();
|
|
||||||
|
|
||||||
// Setup root
|
|
||||||
fs::write(root.join("prometeu.json"), format!(r#"{{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "lib",
|
|
||||||
"dependencies": {{
|
|
||||||
"rem": {{ "git": "file://{}" }}
|
|
||||||
}}
|
|
||||||
}}"#, remote.display())).unwrap();
|
|
||||||
|
|
||||||
let graph = resolve_graph(&root);
|
|
||||||
|
|
||||||
if let Ok(graph) = graph {
|
|
||||||
assert_eq!(graph.nodes.len(), 2);
|
|
||||||
let rem_node = graph.nodes.values().find(|n| n.key.name == "remote").unwrap();
|
|
||||||
assert_eq!(rem_node.key.version, "1.2.3");
|
|
||||||
|
|
||||||
// Verify cache manifest was created
|
|
||||||
assert!(root.join("cache/cache.json").exists());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_resolve_import_path() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root = dir.path().join("root");
|
|
||||||
let sdk = dir.path().join("sdk");
|
|
||||||
fs::create_dir_all(&root).unwrap();
|
|
||||||
fs::create_dir_all(&sdk).unwrap();
|
|
||||||
let root = root.canonicalize().unwrap();
|
|
||||||
let sdk = sdk.canonicalize().unwrap();
|
|
||||||
|
|
||||||
fs::create_dir_all(root.join("src/main/modules")).unwrap();
|
|
||||||
fs::create_dir_all(sdk.join("src/main/modules/math")).unwrap();
|
|
||||||
fs::write(root.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
fs::write(root.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"kind": "app",
|
|
||||||
"dependencies": { "sdk": "../sdk" }
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
fs::write(sdk.join("prometeu.json"), r#"{
|
|
||||||
"name": "sdk",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"kind": "lib"
|
|
||||||
}"#).unwrap();
|
|
||||||
|
|
||||||
let graph = resolve_graph(&root).unwrap();
|
|
||||||
let root_id = graph.root_id.as_ref().unwrap();
|
|
||||||
|
|
||||||
// Resolve @sdk:math
|
|
||||||
let path = graph.resolve_import_path(root_id, "@sdk:math").unwrap();
|
|
||||||
assert_eq!(path.canonicalize().unwrap(), sdk.join("src/main/modules/math").canonicalize().unwrap());
|
|
||||||
|
|
||||||
// Resolve local module
|
|
||||||
let path = graph.resolve_import_path(root_id, "local_mod").unwrap();
|
|
||||||
let expected = root.join("src/main/modules/local_mod");
|
|
||||||
assert_eq!(path, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_resolution_trace_and_explain() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root_dir = dir.path().join("root");
|
|
||||||
fs::create_dir_all(&root_dir).unwrap();
|
|
||||||
let root_dir = root_dir.canonicalize().unwrap();
|
|
||||||
|
|
||||||
// Root project
|
|
||||||
fs::write(root_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"dependencies": {
|
|
||||||
"dep1": { "path": "../dep1" }
|
|
||||||
}
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(root_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(root_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
// Dep 1
|
|
||||||
let dep1_dir = dir.path().join("dep1");
|
|
||||||
fs::create_dir_all(&dep1_dir).unwrap();
|
|
||||||
let dep1_dir = dep1_dir.canonicalize().unwrap();
|
|
||||||
fs::write(dep1_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "dep1",
|
|
||||||
"version": "1.1.0"
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(dep1_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(dep1_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let graph = resolve_graph(&root_dir).unwrap();
|
|
||||||
let explanation = graph.explain();
|
|
||||||
|
|
||||||
assert!(explanation.contains("--- Dependency Resolution Trace ---"));
|
|
||||||
assert!(explanation.contains("[✓] Resolved 'root' v1.0.0"));
|
|
||||||
assert!(explanation.contains("[?] Resolving 'dep1'"));
|
|
||||||
assert!(explanation.contains("[✓] Resolved 'dep1' v1.1.0"));
|
|
||||||
|
|
||||||
assert!(explanation.contains("--- Resolved Dependency Graph ---"));
|
|
||||||
assert!(explanation.contains("root v1.0.0"));
|
|
||||||
assert!(explanation.contains("└── dep1: dep1 v1.1.0"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_conflict_explanation() {
|
|
||||||
let dir = tempdir().unwrap();
|
|
||||||
let root_dir = dir.path().join("root");
|
|
||||||
fs::create_dir_all(&root_dir).unwrap();
|
|
||||||
let root_dir = root_dir.canonicalize().unwrap();
|
|
||||||
|
|
||||||
// Root -> A, B
|
|
||||||
// A -> C v1
|
|
||||||
// B -> C v2
|
|
||||||
|
|
||||||
fs::write(root_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "root",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"dependencies": {
|
|
||||||
"a": { "path": "../a" },
|
|
||||||
"b": { "path": "../b" }
|
|
||||||
}
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(root_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(root_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let a_dir = dir.path().join("a");
|
|
||||||
fs::create_dir_all(&a_dir).unwrap();
|
|
||||||
let a_dir = a_dir.canonicalize().unwrap();
|
|
||||||
fs::write(a_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "a",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"dependencies": { "c": { "path": "../c1" } }
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(a_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(a_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let b_dir = dir.path().join("b");
|
|
||||||
fs::create_dir_all(&b_dir).unwrap();
|
|
||||||
let b_dir = b_dir.canonicalize().unwrap();
|
|
||||||
fs::write(b_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "b",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"dependencies": { "c": { "path": "../c2" } }
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(b_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(b_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let c1_dir = dir.path().join("c1");
|
|
||||||
fs::create_dir_all(&c1_dir).unwrap();
|
|
||||||
let c1_dir = c1_dir.canonicalize().unwrap();
|
|
||||||
fs::write(c1_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "c",
|
|
||||||
"version": "1.0.0"
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(c1_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(c1_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let c2_dir = dir.path().join("c2");
|
|
||||||
fs::create_dir_all(&c2_dir).unwrap();
|
|
||||||
let c2_dir = c2_dir.canonicalize().unwrap();
|
|
||||||
fs::write(c2_dir.join("prometeu.json"), r#"{
|
|
||||||
"name": "c",
|
|
||||||
"version": "2.0.0"
|
|
||||||
}"#).unwrap();
|
|
||||||
fs::create_dir_all(c2_dir.join("src/main/modules")).unwrap();
|
|
||||||
fs::write(c2_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
|
||||||
|
|
||||||
let res = resolve_graph(&root_dir);
|
|
||||||
assert!(res.is_err());
|
|
||||||
|
|
||||||
if let Err(ResolveError::WithTrace { trace, source }) = res {
|
|
||||||
let mut dummy = ResolvedGraph::default();
|
|
||||||
dummy.trace = trace;
|
|
||||||
let explanation = dummy.explain();
|
|
||||||
|
|
||||||
assert!(explanation.contains("[!] CONFLICT for 'c': 1.0.0 vs 2.0.0"));
|
|
||||||
assert!(source.to_string().contains("Version conflict for project 'c': 1.0.0 vs 2.0.0"));
|
|
||||||
} else {
|
|
||||||
panic!("Expected WithTrace error");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,17 +0,0 @@
|
|||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::ir_lang;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use crate::common::files::FileManager;
|
|
||||||
|
|
||||||
pub mod pbs;
|
|
||||||
|
|
||||||
pub trait Frontend {
|
|
||||||
fn language(&self) -> &'static str;
|
|
||||||
|
|
||||||
fn compile_to_ir(
|
|
||||||
&self,
|
|
||||||
entry: &Path,
|
|
||||||
file_manager: &mut FileManager,
|
|
||||||
) -> Result<ir_lang::Module, DiagnosticBundle>;
|
|
||||||
}
|
|
||||||
@ -1,184 +0,0 @@
|
|||||||
use crate::frontends::pbs::{parser::Parser, SymbolCollector, ModuleSymbols, Resolver, ModuleProvider, Lowerer};
|
|
||||||
use crate::frontends::pbs::typecheck::TypeChecker;
|
|
||||||
use crate::lowering::core_to_vm;
|
|
||||||
use crate::common::spans::FileId;
|
|
||||||
use language_api::traits::{Frontend as CanonFrontend, FrontendUnit};
|
|
||||||
use language_api::types::{
|
|
||||||
Diagnostic as CanonDiagnostic,
|
|
||||||
Severity as CanonSeverity,
|
|
||||||
ExportItem,
|
|
||||||
ItemName,
|
|
||||||
LoweredIr,
|
|
||||||
ImportRef,
|
|
||||||
CanonicalFnKey,
|
|
||||||
SignatureRef,
|
|
||||||
parse_pbs_from_string,
|
|
||||||
};
|
|
||||||
use prometeu_analysis::NameInterner;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
/// Adapter implementing the canonical Frontend contract for PBS.
|
|
||||||
pub struct PbsFrontendAdapter;
|
|
||||||
|
|
||||||
impl CanonFrontend for PbsFrontendAdapter {
|
|
||||||
fn parse_and_analyze(&self, entry_path: &str) -> FrontendUnit {
|
|
||||||
// Minimal translation: run existing PBS pipeline and package results into canonical unit.
|
|
||||||
let path = Path::new(entry_path);
|
|
||||||
let mut diags: Vec<CanonDiagnostic> = Vec::new();
|
|
||||||
|
|
||||||
let source = match std::fs::read_to_string(path) {
|
|
||||||
Ok(s) => s,
|
|
||||||
Err(e) => {
|
|
||||||
diags.push(CanonDiagnostic::error(format!("Failed to read file: {}", e)));
|
|
||||||
return FrontendUnit { diagnostics: diags, imports: vec![], exports: vec![], lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut interner = NameInterner::new();
|
|
||||||
let mut parser = Parser::new(&source, FileId(0), &mut interner);
|
|
||||||
let parsed = match parser.parse_file() {
|
|
||||||
Ok(p) => p,
|
|
||||||
Err(d) => {
|
|
||||||
// Translate diagnostics coarsely
|
|
||||||
diags.push(CanonDiagnostic { message: format!("{:?}", d), severity: CanonSeverity::Error });
|
|
||||||
return FrontendUnit { diagnostics: diags, imports: vec![], exports: vec![], lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut collector = SymbolCollector::new(&interner);
|
|
||||||
let (type_symbols, value_symbols) = match collector.collect(&parsed.arena, parsed.root) {
|
|
||||||
Ok(v) => v,
|
|
||||||
Err(d) => {
|
|
||||||
diags.push(CanonDiagnostic { message: format!("{:?}", d), severity: CanonSeverity::Error });
|
|
||||||
return FrontendUnit { diagnostics: diags, imports: vec![], exports: vec![], lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let mut module_symbols = ModuleSymbols { type_symbols, value_symbols };
|
|
||||||
|
|
||||||
struct EmptyProvider;
|
|
||||||
impl ModuleProvider for EmptyProvider {
|
|
||||||
fn get_module_symbols(&self, _path: &str) -> Option<&ModuleSymbols> { None }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Ensure primitives are interned in this FE-local interner
|
|
||||||
let primitives = ["int", "bool", "float", "string", "bounded", "void"];
|
|
||||||
for p in primitives { interner.intern(p); }
|
|
||||||
// Resolver scope (immutable borrow of module_symbols limited to this block)
|
|
||||||
let imported_symbols = {
|
|
||||||
let mut resolver = Resolver::new(&module_symbols, &EmptyProvider, &interner);
|
|
||||||
resolver.bootstrap_types(&interner);
|
|
||||||
if let Err(d) = resolver.resolve(&parsed.arena, parsed.root) {
|
|
||||||
diags.push(CanonDiagnostic { message: format!("{:?}", d), severity: CanonSeverity::Error });
|
|
||||||
return FrontendUnit { diagnostics: diags, imports: vec![], exports: vec![], lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
resolver.imported_symbols.clone()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Run PBS typechecker to compute function/method signatures and basic type info
|
|
||||||
// This ensures exported/imported symbols carry `PbsType::Function { params, .. }` so
|
|
||||||
// lowering can perform deterministic overload resolution by exact signature.
|
|
||||||
let mut tc = TypeChecker::new(&mut module_symbols, &imported_symbols, &EmptyProvider, &interner);
|
|
||||||
if let Err(d) = tc.check(&parsed.arena, parsed.root) {
|
|
||||||
diags.push(CanonDiagnostic { message: format!("{:?}", d), severity: CanonSeverity::Error });
|
|
||||||
return FrontendUnit { diagnostics: diags, imports: vec![], exports: vec![], lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Collect canonical imports from AST imports: `from` must be "@alias:module" and items single identifiers
|
|
||||||
let mut imports: Vec<ImportRef> = Vec::new();
|
|
||||||
if let crate::frontends::pbs::ast::NodeKind::File(file_node) = parsed.arena.kind(parsed.root) {
|
|
||||||
for imp_id in &file_node.imports {
|
|
||||||
if let crate::frontends::pbs::ast::NodeKind::Import(imp) = parsed.arena.kind(*imp_id) {
|
|
||||||
// Parse project/module from `from` string
|
|
||||||
match parse_pbs_from_string(&imp.from) {
|
|
||||||
Ok((project, module)) => {
|
|
||||||
// Resolve item names from spec node
|
|
||||||
if let crate::frontends::pbs::ast::NodeKind::ImportSpec(spec) = parsed.arena.kind(imp.spec) {
|
|
||||||
for &name_id in &spec.path {
|
|
||||||
let name_str = interner.resolve(name_id);
|
|
||||||
match ItemName::new(name_str) {
|
|
||||||
Ok(item) => imports.push(ImportRef::new(project.clone(), module.clone(), item)),
|
|
||||||
Err(e) => diags.push(CanonDiagnostic::error(format!(
|
|
||||||
"Invalid import item '{}': {}",
|
|
||||||
name_str, e
|
|
||||||
))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => diags.push(CanonDiagnostic::error(format!(
|
|
||||||
"Invalid import path '{}': {}",
|
|
||||||
&imp.from, e
|
|
||||||
))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare canonical exports from symbol tables (canonical, no string protocols)
|
|
||||||
let mut exports: Vec<ExportItem> = Vec::new();
|
|
||||||
for list in module_symbols.type_symbols.symbols.values() {
|
|
||||||
for sym in list {
|
|
||||||
if crate::frontends::pbs::symbols::Visibility::Pub != sym.visibility { continue; }
|
|
||||||
if let Ok(name) = ItemName::new(interner.resolve(sym.name)) {
|
|
||||||
match sym.kind {
|
|
||||||
crate::frontends::pbs::symbols::SymbolKind::Service => exports.push(ExportItem::Service { name }),
|
|
||||||
_ => exports.push(ExportItem::Type { name }),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for list in module_symbols.value_symbols.symbols.values() {
|
|
||||||
for sym in list {
|
|
||||||
if crate::frontends::pbs::symbols::Visibility::Pub != sym.visibility { continue; }
|
|
||||||
// Build canonical function key for free fns and methods
|
|
||||||
let raw_name = interner.resolve(sym.name);
|
|
||||||
if let crate::frontends::pbs::symbols::SymbolKind::Function = sym.kind {
|
|
||||||
// No legacy string protocol inference (e.g., `svc:`). Owner is provided only via canonical models.
|
|
||||||
let owner_name = None;
|
|
||||||
// We don't have a stable signature id from PBS yet; use 0 as placeholder until resolver exposes it.
|
|
||||||
let sig = SignatureRef(0);
|
|
||||||
if let Ok(name_item) = ItemName::new(raw_name) {
|
|
||||||
let fn_key = CanonicalFnKey::new(owner_name, name_item, sig);
|
|
||||||
exports.push(ExportItem::Function { fn_key });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Lower to VM IR and wrap as LoweredIr bytes
|
|
||||||
let lowerer = Lowerer::new(&parsed.arena, &module_symbols, &imported_symbols, &EmptyProvider, &interner);
|
|
||||||
let module_name = path.file_stem().unwrap().to_string_lossy();
|
|
||||||
let core_program = match lowerer.lower_file(parsed.root, &module_name) {
|
|
||||||
Ok(c) => c,
|
|
||||||
Err(d) => {
|
|
||||||
diags.push(CanonDiagnostic { message: format!("{:?}", d), severity: CanonSeverity::Error });
|
|
||||||
return FrontendUnit { diagnostics: diags, imports, exports, lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if let Err(e) = crate::ir_core::validate_program(&core_program) {
|
|
||||||
diags.push(CanonDiagnostic::error(format!("Core IR Invariant Violation: {}", e)));
|
|
||||||
return FrontendUnit { diagnostics: diags, imports, exports, lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
let vm_ir = match core_to_vm::lower_program(&core_program) {
|
|
||||||
Ok(m) => m,
|
|
||||||
Err(e) => {
|
|
||||||
diags.push(CanonDiagnostic::error(format!("Lowering error: {}", e)));
|
|
||||||
return FrontendUnit { diagnostics: diags, imports, exports, lowered_ir: LoweredIr::default() };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut bytes = Vec::new();
|
|
||||||
// Serialize VM IR using bincode-like debug encoding (placeholder); for now use JSON as opaque bytes.
|
|
||||||
// Backend owns the meaning; format tag indicates VM-IR.
|
|
||||||
if let Ok(s) = serde_json::to_string(&vm_ir) {
|
|
||||||
bytes.extend_from_slice(s.as_bytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
FrontendUnit {
|
|
||||||
diagnostics: diags,
|
|
||||||
imports,
|
|
||||||
exports,
|
|
||||||
lowered_ir: LoweredIr::new("vm-ir-json", bytes),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,305 +0,0 @@
|
|||||||
use crate::common::spans::Span;
|
|
||||||
use prometeu_analysis::{NameId, NodeId};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
// Use canonical NodeId from prometeu-analysis
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
|
|
||||||
pub struct AstArena {
|
|
||||||
pub nodes: Vec<NodeKind>,
|
|
||||||
pub spans: Vec<Span>,
|
|
||||||
pub roots: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ParsedAst {
|
|
||||||
pub arena: AstArena,
|
|
||||||
pub root: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AstArena {
|
|
||||||
pub fn push(&mut self, kind: NodeKind, span: Span) -> NodeId {
|
|
||||||
let id = NodeId(self.nodes.len() as u32);
|
|
||||||
self.nodes.push(kind);
|
|
||||||
self.spans.push(span);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn kind(&self, id: NodeId) -> &NodeKind {
|
|
||||||
&self.nodes[id.0 as usize]
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn span(&self, id: NodeId) -> Span {
|
|
||||||
self.spans[id.0 as usize].clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
#[serde(tag = "kind")]
|
|
||||||
pub enum NodeKind {
|
|
||||||
File(FileNodeArena),
|
|
||||||
Import(ImportNodeArena),
|
|
||||||
ImportSpec(ImportSpecNodeArena),
|
|
||||||
ServiceDecl(ServiceDeclNodeArena),
|
|
||||||
ServiceFnSig(ServiceFnSigNodeArena),
|
|
||||||
ServiceFnDecl(ServiceFnDeclNodeArena),
|
|
||||||
FnDecl(FnDeclNodeArena),
|
|
||||||
TypeDecl(TypeDeclNodeArena),
|
|
||||||
TypeBody(TypeBodyNodeArena),
|
|
||||||
Block(BlockNodeArena),
|
|
||||||
LetStmt(LetStmtNodeArena),
|
|
||||||
ExprStmt(ExprStmtNodeArena),
|
|
||||||
ReturnStmt(ReturnStmtNodeArena),
|
|
||||||
IntLit(IntLitNodeArena),
|
|
||||||
FloatLit(FloatLitNodeArena),
|
|
||||||
BoundedLit(BoundedLitNodeArena),
|
|
||||||
StringLit(StringLitNodeArena),
|
|
||||||
Ident(IdentNodeArena),
|
|
||||||
Call(CallNodeArena),
|
|
||||||
Unary(UnaryNodeArena),
|
|
||||||
Binary(BinaryNodeArena),
|
|
||||||
Cast(CastNodeArena),
|
|
||||||
IfExpr(IfExprNodeArena),
|
|
||||||
WhenExpr(WhenExprNodeArena),
|
|
||||||
WhenArm(WhenArmNodeArena),
|
|
||||||
TypeName(TypeNameNodeArena),
|
|
||||||
TypeApp(TypeAppNodeArena),
|
|
||||||
ConstructorDecl(ConstructorDeclNodeArena),
|
|
||||||
ConstantDecl(ConstantDeclNodeArena),
|
|
||||||
Alloc(AllocNodeArena),
|
|
||||||
Mutate(MutateNodeArena),
|
|
||||||
Borrow(BorrowNodeArena),
|
|
||||||
Peek(PeekNodeArena),
|
|
||||||
MemberAccess(MemberAccessNodeArena),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct FileNodeArena {
|
|
||||||
pub imports: Vec<NodeId>,
|
|
||||||
pub decls: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ImportNodeArena {
|
|
||||||
pub spec: NodeId,
|
|
||||||
pub from: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ImportSpecNodeArena {
|
|
||||||
pub path: Vec<NameId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ServiceDeclNodeArena {
|
|
||||||
pub vis: Option<String>,
|
|
||||||
pub name: NameId,
|
|
||||||
pub extends: Option<NameId>,
|
|
||||||
pub members: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ServiceFnSigNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
pub params: Vec<ParamNodeArena>,
|
|
||||||
pub ret: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ServiceFnDeclNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
pub params: Vec<ParamNodeArena>,
|
|
||||||
pub ret: NodeId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ParamNodeArena {
|
|
||||||
pub span: Span,
|
|
||||||
pub name: NameId,
|
|
||||||
pub ty: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct FnDeclNodeArena {
|
|
||||||
pub vis: Option<String>,
|
|
||||||
pub name: NameId,
|
|
||||||
pub params: Vec<ParamNodeArena>,
|
|
||||||
pub ret: Option<NodeId>,
|
|
||||||
pub else_fallback: Option<NodeId>,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct TypeDeclNodeArena {
|
|
||||||
pub vis: Option<String>,
|
|
||||||
pub type_kind: String,
|
|
||||||
pub name: NameId,
|
|
||||||
pub is_host: bool,
|
|
||||||
pub params: Vec<ParamNodeArena>,
|
|
||||||
pub constructors: Vec<NodeId>,
|
|
||||||
pub constants: Vec<NodeId>,
|
|
||||||
pub body: Option<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ConstructorDeclNodeArena {
|
|
||||||
pub params: Vec<ParamNodeArena>,
|
|
||||||
pub initializers: Vec<NodeId>,
|
|
||||||
pub name: NameId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ConstantDeclNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
pub value: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct TypeBodyNodeArena {
|
|
||||||
pub members: Vec<TypeMemberNodeArena>,
|
|
||||||
pub methods: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct TypeMemberNodeArena {
|
|
||||||
pub span: Span,
|
|
||||||
pub name: NameId,
|
|
||||||
pub ty: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct BlockNodeArena {
|
|
||||||
pub stmts: Vec<NodeId>,
|
|
||||||
pub tail: Option<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct LetStmtNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
pub is_mut: bool,
|
|
||||||
pub ty: Option<NodeId>,
|
|
||||||
pub init: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ExprStmtNodeArena {
|
|
||||||
pub expr: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct ReturnStmtNodeArena {
|
|
||||||
pub expr: Option<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct IntLitNodeArena {
|
|
||||||
pub value: i64,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct FloatLitNodeArena {
|
|
||||||
pub value: f64,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct BoundedLitNodeArena {
|
|
||||||
pub value: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct StringLitNodeArena {
|
|
||||||
pub value: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct IdentNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct CallNodeArena {
|
|
||||||
pub callee: NodeId,
|
|
||||||
pub args: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct UnaryNodeArena {
|
|
||||||
pub op: String,
|
|
||||||
pub expr: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct BinaryNodeArena {
|
|
||||||
pub op: String,
|
|
||||||
pub left: NodeId,
|
|
||||||
pub right: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct CastNodeArena {
|
|
||||||
pub expr: NodeId,
|
|
||||||
pub ty: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct IfExprNodeArena {
|
|
||||||
pub cond: NodeId,
|
|
||||||
pub then_block: NodeId,
|
|
||||||
pub else_block: Option<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct WhenExprNodeArena {
|
|
||||||
pub arms: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct WhenArmNodeArena {
|
|
||||||
pub cond: NodeId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct TypeNameNodeArena {
|
|
||||||
pub name: NameId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct TypeAppNodeArena {
|
|
||||||
pub base: NameId,
|
|
||||||
pub args: Vec<NodeId>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct AllocNodeArena {
|
|
||||||
pub ty: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct MutateNodeArena {
|
|
||||||
pub target: NodeId,
|
|
||||||
pub binding: NameId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct BorrowNodeArena {
|
|
||||||
pub target: NodeId,
|
|
||||||
pub binding: NameId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct PeekNodeArena {
|
|
||||||
pub target: NodeId,
|
|
||||||
pub binding: NameId,
|
|
||||||
pub body: NodeId,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
|
||||||
pub struct MemberAccessNodeArena {
|
|
||||||
pub object: NodeId,
|
|
||||||
pub member: NameId,
|
|
||||||
}
|
|
||||||
@ -1,242 +0,0 @@
|
|||||||
use crate::common::diagnostics::{Diagnostic, DiagnosticBundle, Severity};
|
|
||||||
use crate::frontends::pbs::ast::*;
|
|
||||||
use crate::frontends::pbs::symbols::*;
|
|
||||||
use crate::semantics::export_surface::ExportSurfaceKind;
|
|
||||||
use prometeu_analysis::{NameInterner, NodeId};
|
|
||||||
|
|
||||||
pub struct SymbolCollector<'a> {
|
|
||||||
interner: &'a NameInterner,
|
|
||||||
type_symbols: SymbolTable,
|
|
||||||
value_symbols: SymbolTable,
|
|
||||||
diagnostics: Vec<Diagnostic>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> SymbolCollector<'a> {
|
|
||||||
pub fn new(interner: &'a NameInterner) -> Self {
|
|
||||||
Self {
|
|
||||||
interner,
|
|
||||||
type_symbols: SymbolTable::new(),
|
|
||||||
value_symbols: SymbolTable::new(),
|
|
||||||
diagnostics: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn collect(
|
|
||||||
&mut self,
|
|
||||||
arena: &AstArena,
|
|
||||||
root: NodeId,
|
|
||||||
) -> Result<(SymbolTable, SymbolTable), DiagnosticBundle> {
|
|
||||||
let file = match arena.kind(root) {
|
|
||||||
NodeKind::File(file) => file,
|
|
||||||
_ => {
|
|
||||||
return Err(DiagnosticBundle::error(
|
|
||||||
"E_COLLECT_INVALID_ROOT",
|
|
||||||
"Expected File node as root".to_string(),
|
|
||||||
arena.span(root),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
for decl in &file.decls {
|
|
||||||
match arena.kind(*decl) {
|
|
||||||
NodeKind::FnDecl(fn_decl) => self.collect_fn(arena, *decl, fn_decl),
|
|
||||||
NodeKind::ServiceDecl(service_decl) => self.collect_service(arena, *decl, service_decl),
|
|
||||||
NodeKind::TypeDecl(type_decl) => self.collect_type(arena, *decl, type_decl),
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !self.diagnostics.is_empty() {
|
|
||||||
return Err(DiagnosticBundle {
|
|
||||||
diagnostics: self.diagnostics.clone(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
std::mem::replace(&mut self.type_symbols, SymbolTable::new()),
|
|
||||||
std::mem::replace(&mut self.value_symbols, SymbolTable::new()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn collect_fn(&mut self, arena: &AstArena, id: NodeId, decl: &FnDeclNodeArena) {
|
|
||||||
let vis = match decl.vis.as_deref() {
|
|
||||||
Some("pub") => Visibility::Pub,
|
|
||||||
Some("mod") => Visibility::Mod,
|
|
||||||
_ => Visibility::FilePrivate,
|
|
||||||
};
|
|
||||||
|
|
||||||
let span = arena.span(id);
|
|
||||||
self.check_export_eligibility(SymbolKind::Function, vis, &span);
|
|
||||||
|
|
||||||
let symbol = Symbol {
|
|
||||||
name: decl.name,
|
|
||||||
kind: SymbolKind::Function,
|
|
||||||
namespace: Namespace::Value,
|
|
||||||
visibility: vis,
|
|
||||||
ty: None, // Will be resolved later
|
|
||||||
is_host: false,
|
|
||||||
span: span.clone(),
|
|
||||||
origin: None,
|
|
||||||
};
|
|
||||||
self.insert_value_symbol(symbol);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn collect_service(&mut self, arena: &AstArena, id: NodeId, decl: &ServiceDeclNodeArena) {
|
|
||||||
let vis = match decl.vis.as_deref() {
|
|
||||||
Some("pub") => Visibility::Pub,
|
|
||||||
_ => Visibility::Mod, // Defaults to Mod
|
|
||||||
};
|
|
||||||
|
|
||||||
let span = arena.span(id);
|
|
||||||
|
|
||||||
self.check_export_eligibility(SymbolKind::Service, vis, &span);
|
|
||||||
|
|
||||||
let symbol = Symbol {
|
|
||||||
name: decl.name,
|
|
||||||
kind: SymbolKind::Service,
|
|
||||||
namespace: Namespace::Type, // Service is a type
|
|
||||||
visibility: vis,
|
|
||||||
ty: None,
|
|
||||||
is_host: false,
|
|
||||||
span: span.clone(),
|
|
||||||
origin: None,
|
|
||||||
};
|
|
||||||
self.insert_type_symbol(symbol);
|
|
||||||
|
|
||||||
for member in &decl.members {
|
|
||||||
match arena.kind(*member) {
|
|
||||||
NodeKind::ServiceFnDecl(method) => {
|
|
||||||
// Export also as a value symbol (function) — simple name (unqualified)
|
|
||||||
let sym = Symbol {
|
|
||||||
name: method.name,
|
|
||||||
kind: SymbolKind::Function,
|
|
||||||
namespace: Namespace::Value,
|
|
||||||
visibility: vis, // herda do service
|
|
||||||
ty: None,
|
|
||||||
is_host: false,
|
|
||||||
span: arena.span(*member),
|
|
||||||
// No legacy string protocol markers (e.g., `svc:Service`). Canonical owner is handled elsewhere.
|
|
||||||
origin: None,
|
|
||||||
};
|
|
||||||
self.insert_value_symbol(sym);
|
|
||||||
}
|
|
||||||
NodeKind::ServiceFnSig(method) => {
|
|
||||||
// Mesmo para assinaturas sem corpo, export surface deve conhecer o símbolo
|
|
||||||
let sym = Symbol {
|
|
||||||
name: method.name,
|
|
||||||
kind: SymbolKind::Function,
|
|
||||||
namespace: Namespace::Value,
|
|
||||||
visibility: vis,
|
|
||||||
ty: None,
|
|
||||||
is_host: false,
|
|
||||||
span: arena.span(*member),
|
|
||||||
origin: None,
|
|
||||||
};
|
|
||||||
self.insert_value_symbol(sym);
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn collect_type(&mut self, arena: &AstArena, id: NodeId, decl: &TypeDeclNodeArena) {
|
|
||||||
let vis = match decl.vis.as_deref() {
|
|
||||||
Some("pub") => Visibility::Pub,
|
|
||||||
Some("mod") => Visibility::Mod,
|
|
||||||
_ => Visibility::FilePrivate,
|
|
||||||
};
|
|
||||||
let kind = match decl.type_kind.as_str() {
|
|
||||||
"struct" => SymbolKind::Struct,
|
|
||||||
"contract" => SymbolKind::Contract,
|
|
||||||
"error" => SymbolKind::ErrorType,
|
|
||||||
_ => SymbolKind::Struct, // Default
|
|
||||||
};
|
|
||||||
|
|
||||||
let span = arena.span(id);
|
|
||||||
|
|
||||||
self.check_export_eligibility(kind.clone(), vis, &span);
|
|
||||||
|
|
||||||
let symbol = Symbol {
|
|
||||||
name: decl.name,
|
|
||||||
kind,
|
|
||||||
namespace: Namespace::Type,
|
|
||||||
visibility: vis,
|
|
||||||
ty: None,
|
|
||||||
is_host: decl.is_host,
|
|
||||||
span: span.clone(),
|
|
||||||
origin: None,
|
|
||||||
};
|
|
||||||
self.insert_type_symbol(symbol);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Inserts `symbol` into the type namespace. Reports a cross-namespace
/// collision if the name already exists as a value, or a duplicate if it
/// already exists as a type.
fn insert_type_symbol(&mut self, symbol: Symbol) {
    // A type may not share its name with a value symbol.
    let clash = self.value_symbols.get(symbol.name).cloned();
    if let Some(existing) = clash {
        self.error_collision(&symbol, &existing);
        return;
    }

    // Same-namespace redefinition: the table rejects the insert and we report it.
    if self.type_symbols.insert(symbol.clone()).is_err() {
        if let Some(previous) = self.type_symbols.get(symbol.name).cloned() {
            self.error_duplicate(&symbol, &previous);
        }
    }
}
|
|
||||||
|
|
||||||
/// Inserts `symbol` into the value namespace. Reports a cross-namespace
/// collision if the name already exists as a type, or a duplicate if it
/// already exists as a value.
fn insert_value_symbol(&mut self, symbol: Symbol) {
    // A value may not share its name with a type symbol.
    let clash = self.type_symbols.get(symbol.name).cloned();
    if let Some(existing) = clash {
        self.error_collision(&symbol, &existing);
        return;
    }

    // Same-namespace redefinition: the table rejects the insert and we report it.
    if self.value_symbols.insert(symbol.clone()).is_err() {
        if let Some(previous) = self.value_symbols.get(symbol.name).cloned() {
            self.error_duplicate(&symbol, &previous);
        }
    }
}
|
|
||||||
|
|
||||||
fn error_duplicate(&mut self, symbol: &Symbol, existing: &Symbol) {
|
|
||||||
self.diagnostics.push(Diagnostic {
|
|
||||||
severity: Severity::Error,
|
|
||||||
code: "E_RESOLVE_DUPLICATE_SYMBOL".to_string(),
|
|
||||||
message: format!(
|
|
||||||
"Duplicate symbol '{}' already defined at {:?}",
|
|
||||||
self.interner.resolve(symbol.name),
|
|
||||||
&existing.span
|
|
||||||
),
|
|
||||||
span: symbol.span.clone(),
|
|
||||||
related: Vec::new(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn error_collision(&mut self, symbol: &Symbol, existing: &Symbol) {
|
|
||||||
self.diagnostics.push(Diagnostic {
|
|
||||||
severity: Severity::Error,
|
|
||||||
code: "E_RESOLVE_NAMESPACE_COLLISION".to_string(),
|
|
||||||
message: format!(
|
|
||||||
"DebugSymbol '{}' collides with another symbol in the {:?} namespace defined at {:?}",
|
|
||||||
self.interner.resolve(symbol.name),
|
|
||||||
existing.namespace,
|
|
||||||
&existing.span
|
|
||||||
),
|
|
||||||
span: symbol.span.clone(),
|
|
||||||
related: Vec::new(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
fn check_export_eligibility(&mut self, kind: SymbolKind, vis: Visibility, span: &crate::common::spans::Span) {
|
|
||||||
if let Err(msg) = ExportSurfaceKind::validate_visibility(kind, vis) {
|
|
||||||
self.diagnostics.push(Diagnostic {
|
|
||||||
severity: Severity::Error,
|
|
||||||
code: "E_SEMANTIC_EXPORT_RESTRICTION".to_string(),
|
|
||||||
message: msg,
|
|
||||||
span: span.clone(),
|
|
||||||
related: Vec::new(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,294 +0,0 @@
|
|||||||
use crate::frontends::pbs::types::PbsType;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// One callable method of a host contract, as seen by the compiler.
pub struct ContractMethod {
    // Syscall id the call lowers to at runtime.
    pub id: u32,
    // Parameter types in call order.
    pub params: Vec<PbsType>,
    // Type pushed back by the syscall (Void when nothing is returned).
    pub return_type: PbsType,
}
|
|
||||||
|
|
||||||
/// Registry of built-in host contracts: contract name → method name → method.
pub struct ContractRegistry {
    // Outer key: contract name (e.g. "Gfx"); inner key: method name.
    mappings: HashMap<String, HashMap<String, ContractMethod>>,
}
|
|
||||||
|
|
||||||
impl ContractRegistry {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
let mut mappings = HashMap::new();
|
|
||||||
|
|
||||||
// GFX mappings
|
|
||||||
let mut gfx = HashMap::new();
|
|
||||||
gfx.insert("clear".to_string(), ContractMethod {
|
|
||||||
id: 0x1010,
|
|
||||||
params: vec![PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("fillRect".to_string(), ContractMethod {
|
|
||||||
id: 0x1002,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("drawLine".to_string(), ContractMethod {
|
|
||||||
id: 0x1003,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("drawCircle".to_string(), ContractMethod {
|
|
||||||
id: 0x1004,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("drawDisc".to_string(), ContractMethod {
|
|
||||||
id: 0x1005,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("drawSquare".to_string(), ContractMethod {
|
|
||||||
id: 0x1006,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("setSprite".to_string(), ContractMethod {
|
|
||||||
id: 0x1007,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
gfx.insert("drawText".to_string(), ContractMethod {
|
|
||||||
id: 0x1008,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::String, PbsType::Struct("Color".to_string())],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
mappings.insert("Gfx".to_string(), gfx);
|
|
||||||
|
|
||||||
// Input legacy mappings (kept for backward compatibility)
|
|
||||||
let mut input = HashMap::new();
|
|
||||||
input.insert("pad".to_string(), ContractMethod {
|
|
||||||
id: 0x2010,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Pad".to_string()),
|
|
||||||
});
|
|
||||||
input.insert("touch".to_string(), ContractMethod {
|
|
||||||
id: 0x2011,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Touch".to_string()),
|
|
||||||
});
|
|
||||||
mappings.insert("Input".to_string(), input);
|
|
||||||
|
|
||||||
// New Pad service-based mappings (each button as a method returning Button)
|
|
||||||
let mut pad = HashMap::new();
|
|
||||||
// NOTE: The syscalls for per-button access must be handled by the runtime.
|
|
||||||
// We reserve the 0x2200..0x220B range for Pad buttons returning a Button struct (6 bytes).
|
|
||||||
pad.insert("up".to_string(), ContractMethod {
|
|
||||||
id: 0x2200,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("down".to_string(), ContractMethod {
|
|
||||||
id: 0x2201,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("left".to_string(), ContractMethod {
|
|
||||||
id: 0x2202,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("right".to_string(), ContractMethod {
|
|
||||||
id: 0x2203,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("a".to_string(), ContractMethod {
|
|
||||||
id: 0x2204,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("b".to_string(), ContractMethod {
|
|
||||||
id: 0x2205,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("x".to_string(), ContractMethod {
|
|
||||||
id: 0x2206,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("y".to_string(), ContractMethod {
|
|
||||||
id: 0x2207,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("l".to_string(), ContractMethod {
|
|
||||||
id: 0x2208,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("r".to_string(), ContractMethod {
|
|
||||||
id: 0x2209,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("start".to_string(), ContractMethod {
|
|
||||||
id: 0x220A,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
pad.insert("select".to_string(), ContractMethod {
|
|
||||||
id: 0x220B,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
mappings.insert("Pad".to_string(), pad);
|
|
||||||
|
|
||||||
// Touch mappings (service-based)
|
|
||||||
let mut touch = HashMap::new();
|
|
||||||
// SDK agora expõe screen_x/screen_y/finger()
|
|
||||||
touch.insert("screen_x".to_string(), ContractMethod {
|
|
||||||
id: 0x2101,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
touch.insert("screen_y".to_string(), ContractMethod {
|
|
||||||
id: 0x2102,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
// Novo syscall dedicado para retornar Button completo do touch (dedo)
|
|
||||||
touch.insert("finger".to_string(), ContractMethod {
|
|
||||||
id: 0x2107,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Struct("Button".to_string()),
|
|
||||||
});
|
|
||||||
mappings.insert("Touch".to_string(), touch);
|
|
||||||
|
|
||||||
// Audio mappings
|
|
||||||
let mut audio = HashMap::new();
|
|
||||||
audio.insert("playSample".to_string(), ContractMethod {
|
|
||||||
id: 0x3001,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
audio.insert("play".to_string(), ContractMethod {
|
|
||||||
id: 0x3002,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
mappings.insert("Audio".to_string(), audio);
|
|
||||||
|
|
||||||
// FS mappings
|
|
||||||
let mut fs = HashMap::new();
|
|
||||||
fs.insert("open".to_string(), ContractMethod {
|
|
||||||
id: 0x4001,
|
|
||||||
params: vec![PbsType::String, PbsType::String],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
fs.insert("read".to_string(), ContractMethod {
|
|
||||||
id: 0x4002,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::String,
|
|
||||||
});
|
|
||||||
fs.insert("write".to_string(), ContractMethod {
|
|
||||||
id: 0x4003,
|
|
||||||
params: vec![PbsType::Int, PbsType::String],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
fs.insert("close".to_string(), ContractMethod {
|
|
||||||
id: 0x4004,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
fs.insert("listDir".to_string(), ContractMethod {
|
|
||||||
id: 0x4005,
|
|
||||||
params: vec![PbsType::String],
|
|
||||||
return_type: PbsType::String,
|
|
||||||
});
|
|
||||||
fs.insert("exists".to_string(), ContractMethod {
|
|
||||||
id: 0x4006,
|
|
||||||
params: vec![PbsType::String],
|
|
||||||
return_type: PbsType::Bool,
|
|
||||||
});
|
|
||||||
fs.insert("delete".to_string(), ContractMethod {
|
|
||||||
id: 0x4007,
|
|
||||||
params: vec![PbsType::String],
|
|
||||||
return_type: PbsType::Bool,
|
|
||||||
});
|
|
||||||
mappings.insert("Fs".to_string(), fs);
|
|
||||||
|
|
||||||
// Log mappings (host)
|
|
||||||
let mut log = HashMap::new();
|
|
||||||
log.insert("write".to_string(), ContractMethod {
|
|
||||||
id: 0x5001,
|
|
||||||
params: vec![PbsType::Int, PbsType::String],
|
|
||||||
// Log syscalls não retornam valor (void) — evita lixo de pilha
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
log.insert("writeTag".to_string(), ContractMethod {
|
|
||||||
id: 0x5002,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int, PbsType::String],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
// O contrato host exposto no SDK é LogHost (não-público)
|
|
||||||
mappings.insert("LogHost".to_string(), log);
|
|
||||||
|
|
||||||
// System mappings
|
|
||||||
let mut system = HashMap::new();
|
|
||||||
system.insert("hasCart".to_string(), ContractMethod {
|
|
||||||
id: 0x0001,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Bool,
|
|
||||||
});
|
|
||||||
system.insert("runCart".to_string(), ContractMethod {
|
|
||||||
id: 0x0002,
|
|
||||||
params: vec![],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
mappings.insert("System".to_string(), system);
|
|
||||||
|
|
||||||
// Asset mappings
|
|
||||||
let mut asset = HashMap::new();
|
|
||||||
asset.insert("load".to_string(), ContractMethod {
|
|
||||||
id: 0x6001,
|
|
||||||
params: vec![PbsType::String],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
asset.insert("status".to_string(), ContractMethod {
|
|
||||||
id: 0x6002,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Int,
|
|
||||||
});
|
|
||||||
asset.insert("commit".to_string(), ContractMethod {
|
|
||||||
id: 0x6003,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
asset.insert("cancel".to_string(), ContractMethod {
|
|
||||||
id: 0x6004,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::Void,
|
|
||||||
});
|
|
||||||
mappings.insert("Asset".to_string(), asset);
|
|
||||||
|
|
||||||
// Bank mappings
|
|
||||||
let mut bank = HashMap::new();
|
|
||||||
bank.insert("info".to_string(), ContractMethod {
|
|
||||||
id: 0x6101,
|
|
||||||
params: vec![PbsType::Int],
|
|
||||||
return_type: PbsType::String,
|
|
||||||
});
|
|
||||||
bank.insert("slotInfo".to_string(), ContractMethod {
|
|
||||||
id: 0x6102,
|
|
||||||
params: vec![PbsType::Int, PbsType::Int],
|
|
||||||
return_type: PbsType::String,
|
|
||||||
});
|
|
||||||
mappings.insert("Bank".to_string(), bank);
|
|
||||||
|
|
||||||
Self { mappings }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve(&self, contract: &str, method: &str) -> Option<u32> {
|
|
||||||
self.mappings.get(contract).and_then(|m| m.get(method)).map(|m| m.id)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_method(&self, contract: &str, method: &str) -> Option<&ContractMethod> {
|
|
||||||
self.mappings.get(contract).and_then(|m| m.get(method))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,37 +0,0 @@
|
|||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::frontends::pbs::ast::ParsedAst;
|
|
||||||
use crate::frontends::pbs::resolver::ModuleProvider;
|
|
||||||
use crate::frontends::pbs::symbols::ModuleSymbols;
|
|
||||||
use crate::frontends::pbs::typecheck::TypeChecker;
|
|
||||||
use prometeu_analysis::NameInterner;
|
|
||||||
|
|
||||||
/// Unified frontend entrypoint: typecheck a pre-collected module, ensuring all public
|
|
||||||
/// symbols are typed. This function does not perform collection — callers must provide
|
|
||||||
/// a `ModuleSymbols` that already contains all symbols for the current module (across files).
|
|
||||||
///
|
|
||||||
/// Invariant after success:
|
|
||||||
/// - Every public function/service method has `sym.ty = Some(PbsType::Function { ... })`.
|
|
||||||
/// Unified frontend entrypoint: typecheck a pre-collected module, ensuring all public
/// symbols are typed. This function does not perform collection — callers must provide
/// a `ModuleSymbols` that already contains all symbols for the current module (across files).
///
/// Invariant after success:
/// - Every public function/service method has `sym.ty = Some(PbsType::Function { ... })`.
///
/// Errors: returns the `DiagnosticBundle` produced by the type checker on failure.
pub fn build_typed_module_symbols(
    parsed: &ParsedAst,
    current_module: &mut ModuleSymbols,
    imported_symbols: &ModuleSymbols,
    interner: &mut NameInterner,
) -> Result<(), DiagnosticBundle> {
    // 0) Ensure primitive names are interned (resolver/typechecker expect them present)
    for p in ["int", "bool", "float", "string", "bounded", "void"] {
        interner.intern(p);
    }

    // Typecheck using an empty provider (exports typing must not consult provider)
    struct EmptyProvider;
    impl ModuleProvider for EmptyProvider {
        fn get_module_symbols(&self, _from_path: &str) -> Option<&ModuleSymbols> { None }
    }
    let provider = EmptyProvider;

    // NOTE(review): `&interner` borrows the `&mut NameInterner` binding, i.e.
    // `&&mut NameInterner` — presumably `TypeChecker::new` accepts this via
    // deref coercion; confirm against its declared parameter type.
    let mut checker = TypeChecker::new(current_module, imported_symbols, &provider, &interner);
    checker.check(&parsed.arena, parsed.root)?;

    // Done — typed symbols populated in-place
    Ok(())
}
|
|
||||||
@ -1,442 +0,0 @@
|
|||||||
use super::token::{Token, TokenKind};
|
|
||||||
use crate::common::spans::{FileId, Span};
|
|
||||||
use std::iter::Peekable;
|
|
||||||
use std::str::Chars;
|
|
||||||
|
|
||||||
/// Hand-written single-pass lexer over a source string.
pub struct Lexer<'a> {
    // Peekable character stream over the source text.
    chars: Peekable<Chars<'a>>,
    // File every emitted span is tagged with.
    file_id: FileId,
    // Current byte offset into the source (advanced by each char's UTF-8 length).
    pos: u32,
}
|
|
||||||
|
|
||||||
impl<'a> Lexer<'a> {
    /// Creates a lexer over `source`; every emitted span is tagged with `file_id`.
    pub fn new(source: &'a str, file_id: FileId) -> Self {
        Self {
            chars: source.chars().peekable(),
            file_id,
            pos: 0,
        }
    }

    /// Returns the next character without consuming it.
    fn peek(&mut self) -> Option<char> {
        self.chars.peek().copied()
    }

    /// Consumes one character, advancing `pos` by its UTF-8 byte length
    /// so spans are byte offsets into the source.
    fn next(&mut self) -> Option<char> {
        let c = self.chars.next();
        if let Some(c) = c {
            self.pos += c.len_utf8() as u32;
        }
        c
    }

    /// Skips whitespace and `//` line comments. A lone '/' is left in place
    /// (it is the division operator); block comments are not supported.
    fn skip_whitespace(&mut self) {
        while let Some(c) = self.peek() {
            if c.is_whitespace() {
                self.next();
            } else if c == '/' {
                if self.peek_next() == Some('/') {
                    // Line comment
                    self.next(); // /
                    self.next(); // /
                    // Consume up to (but not including) the newline.
                    while let Some(c) = self.peek() {
                        if c == '\n' {
                            break;
                        }
                        self.next();
                    }
                } else {
                    break;
                }
            } else {
                break;
            }
        }
    }

    /// Two-character lookahead: peeks at the character after the next one by
    /// cloning the underlying iterator (consumes nothing, leaves `pos` untouched).
    fn peek_next(&self) -> Option<char> {
        let mut cloned = self.chars.clone();
        cloned.next();
        cloned.peek().copied()
    }

    /// Produces the next token. Returns `TokenKind::Eof` at end of input and
    /// `TokenKind::Invalid` for characters outside the language's alphabet.
    pub fn next_token(&mut self) -> Token {
        self.skip_whitespace();

        let start = self.pos;
        let c = match self.next() {
            Some(c) => c,
            None => return Token::new(TokenKind::Eof, Span::new(self.file_id, start, start)),
        };

        let kind = match c {
            '(' => TokenKind::OpenParen,
            ')' => TokenKind::CloseParen,
            '{' => TokenKind::OpenBrace,
            '}' => TokenKind::CloseBrace,
            // '[[' and ']]' are distinct two-character tokens.
            '[' => {
                if self.peek() == Some('[') {
                    self.next();
                    TokenKind::OpenDoubleBracket
                } else {
                    TokenKind::OpenBracket
                }
            }
            ']' => {
                if self.peek() == Some(']') {
                    self.next();
                    TokenKind::CloseDoubleBracket
                } else {
                    TokenKind::CloseBracket
                }
            }
            ',' => TokenKind::Comma,
            '.' => TokenKind::Dot,
            ':' => TokenKind::Colon,
            ';' => TokenKind::Semicolon,
            '=' => {
                if self.peek() == Some('=') {
                    self.next();
                    TokenKind::Eq
                } else {
                    TokenKind::Assign
                }
            }
            '+' => TokenKind::Plus,
            // '-' may start the '->' arrow.
            '-' => {
                if self.peek() == Some('>') {
                    self.next();
                    TokenKind::Arrow
                } else {
                    TokenKind::Minus
                }
            }
            '*' => TokenKind::Star,
            '/' => TokenKind::Slash,
            '%' => TokenKind::Percent,
            '!' => {
                if self.peek() == Some('=') {
                    self.next();
                    TokenKind::Neq
                } else {
                    TokenKind::Not
                }
            }
            '<' => {
                if self.peek() == Some('=') {
                    self.next();
                    TokenKind::Lte
                } else {
                    TokenKind::Lt
                }
            }
            '>' => {
                if self.peek() == Some('=') {
                    self.next();
                    TokenKind::Gte
                } else {
                    TokenKind::Gt
                }
            }
            // '&' and '|' only exist doubled ('&&' / '||'); the single forms are invalid.
            '&' => {
                if self.peek() == Some('&') {
                    self.next();
                    TokenKind::And
                } else {
                    TokenKind::Invalid("&".to_string())
                }
            }
            '|' => {
                if self.peek() == Some('|') {
                    self.next();
                    TokenKind::Or
                } else {
                    TokenKind::Invalid("|".to_string())
                }
            }
            '"' => self.lex_string(),
            '0'..='9' => self.lex_number(c),
            c if is_identifier_start(c) => self.lex_identifier(c),
            _ => TokenKind::Invalid(c.to_string()),
        };

        Token::new(kind, Span::new(self.file_id, start, self.pos))
    }

    /// Lexes a string literal body (the opening quote is already consumed).
    /// No escape sequences are recognized; a newline or EOF before the
    /// closing quote yields an Invalid token.
    fn lex_string(&mut self) -> TokenKind {
        let mut s = String::new();
        while let Some(c) = self.peek() {
            if c == '"' {
                self.next();
                return TokenKind::StringLit(s);
            }
            if c == '\n' {
                break; // Unterminated string
            }
            s.push(self.next().unwrap());
        }
        TokenKind::Invalid("Unterminated string".to_string())
    }

    /// Lexes int, float, and bounded (`NNNb`) literals; `first` is the digit
    /// already consumed.
    fn lex_number(&mut self, first: char) -> TokenKind {
        let mut s = String::new();
        s.push(first);
        let mut is_float = false;

        while let Some(c) = self.peek() {
            if c.is_ascii_digit() {
                s.push(self.next().unwrap());
            } else if c == '.' && !is_float {
                // Only treat '.' as a decimal point when a digit follows, so
                // `1.foo` lexes as IntLit(1), Dot, Identifier.
                if let Some(next_c) = self.peek_next() {
                    if next_c.is_ascii_digit() {
                        is_float = true;
                        s.push(self.next().unwrap()); // .
                        s.push(self.next().unwrap()); // next digit
                    } else {
                        break;
                    }
                } else {
                    break;
                }
            } else {
                break;
            }
        }

        // An integer immediately followed by 'b' is a bounded literal (e.g. `255b`).
        if self.peek() == Some('b') && !is_float {
            self.next(); // consume 'b'
            if let Ok(val) = s.parse::<u32>() {
                return TokenKind::BoundedLit(val);
            }
        }

        if is_float {
            if let Ok(val) = s.parse::<f64>() {
                return TokenKind::FloatLit(val);
            }
        } else {
            if let Ok(val) = s.parse::<i64>() {
                return TokenKind::IntLit(val);
            }
        }

        // Digits that fail to parse (e.g. overflow) come back as Invalid.
        TokenKind::Invalid(s)
    }

    /// Lexes an identifier or keyword; `first` is the character already consumed.
    fn lex_identifier(&mut self, first: char) -> TokenKind {
        let mut s = String::new();
        s.push(first);
        while let Some(c) = self.peek() {
            if is_identifier_part(c) {
                s.push(self.next().unwrap());
            } else {
                break;
            }
        }

        // Keyword table: any name not listed is a plain identifier.
        match s.as_str() {
            "import" => TokenKind::Import,
            "pub" => TokenKind::Pub,
            "mod" => TokenKind::Mod,
            "service" => TokenKind::Service,
            "fn" => TokenKind::Fn,
            "let" => TokenKind::Let,
            "mut" => TokenKind::Mut,
            "declare" => TokenKind::Declare,
            "struct" => TokenKind::Struct,
            "contract" => TokenKind::Contract,
            "host" => TokenKind::Host,
            "error" => TokenKind::Error,
            "optional" => TokenKind::Optional,
            "result" => TokenKind::Result,
            "some" => TokenKind::Some,
            "none" => TokenKind::None,
            "ok" => TokenKind::Ok,
            "err" => TokenKind::Err,
            "if" => TokenKind::If,
            "else" => TokenKind::Else,
            "when" => TokenKind::When,
            "for" => TokenKind::For,
            "in" => TokenKind::In,
            "return" => TokenKind::Return,
            "handle" => TokenKind::Handle,
            "borrow" => TokenKind::Borrow,
            "mutate" => TokenKind::Mutate,
            "peek" => TokenKind::Peek,
            "take" => TokenKind::Take,
            "alloc" => TokenKind::Alloc,
            "weak" => TokenKind::Weak,
            "as" => TokenKind::As,
            "bounded" => TokenKind::Bounded,
            _ => TokenKind::Identifier(s),
        }
    }
}
|
|
||||||
|
|
||||||
/// True when `c` may begin an identifier: an underscore or any alphabetic character.
fn is_identifier_start(c: char) -> bool {
    c == '_' || c.is_alphabetic()
}
|
|
||||||
|
|
||||||
/// True when `c` may continue an identifier: an underscore or any alphanumeric character.
fn is_identifier_part(c: char) -> bool {
    c == '_' || c.is_alphanumeric()
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::frontends::pbs::token::TokenKind;

    // Every punctuation and operator token, single- and double-character.
    #[test]
    fn test_lex_basic_tokens() {
        let source = "( ) { } [ ] , . : ; -> = == + - * / % ! != < > <= >= && ||";
        let mut lexer = Lexer::new(source, FileId(0));

        let expected = vec![
            TokenKind::OpenParen, TokenKind::CloseParen,
            TokenKind::OpenBrace, TokenKind::CloseBrace,
            TokenKind::OpenBracket, TokenKind::CloseBracket,
            TokenKind::Comma, TokenKind::Dot, TokenKind::Colon, TokenKind::Semicolon,
            TokenKind::Arrow, TokenKind::Assign, TokenKind::Eq,
            TokenKind::Plus, TokenKind::Minus, TokenKind::Star, TokenKind::Slash, TokenKind::Percent,
            TokenKind::Not, TokenKind::Neq,
            TokenKind::Lt, TokenKind::Gt, TokenKind::Lte, TokenKind::Gte,
            TokenKind::And, TokenKind::Or,
            TokenKind::Eof,
        ];

        for kind in expected {
            let token = lexer.next_token();
            assert_eq!(token.kind, kind);
        }
    }

    // Every reserved word maps to its keyword token, in declaration order.
    #[test]
    fn test_lex_keywords() {
        let source = "import pub mod service fn let mut declare struct contract host error optional result some none ok err if else when for in return handle borrow mutate peek take alloc weak as";
        let mut lexer = Lexer::new(source, FileId(0));

        let expected = vec![
            TokenKind::Import, TokenKind::Pub, TokenKind::Mod, TokenKind::Service,
            TokenKind::Fn, TokenKind::Let, TokenKind::Mut, TokenKind::Declare,
            TokenKind::Struct, TokenKind::Contract, TokenKind::Host, TokenKind::Error,
            TokenKind::Optional, TokenKind::Result, TokenKind::Some, TokenKind::None,
            TokenKind::Ok, TokenKind::Err, TokenKind::If, TokenKind::Else,
            TokenKind::When, TokenKind::For, TokenKind::In, TokenKind::Return,
            TokenKind::Handle, TokenKind::Borrow, TokenKind::Mutate, TokenKind::Peek,
            TokenKind::Take, TokenKind::Alloc, TokenKind::Weak, TokenKind::As,
            TokenKind::Eof,
        ];

        for kind in expected {
            let token = lexer.next_token();
            assert_eq!(token.kind, kind);
        }
    }

    // Non-keyword names, including leading underscore and trailing digits.
    #[test]
    fn test_lex_identifiers() {
        let source = "foo bar _baz qux123";
        let mut lexer = Lexer::new(source, FileId(0));

        let expected = vec![
            TokenKind::Identifier("foo".to_string()),
            TokenKind::Identifier("bar".to_string()),
            TokenKind::Identifier("_baz".to_string()),
            TokenKind::Identifier("qux123".to_string()),
            TokenKind::Eof,
        ];

        for kind in expected {
            let token = lexer.next_token();
            assert_eq!(token.kind, kind);
        }
    }

    // Int, float, bounded (`255b`), and string literals.
    #[test]
    fn test_lex_literals() {
        let source = "123 3.14 255b \"hello world\"";
        let mut lexer = Lexer::new(source, FileId(0));

        let expected = vec![
            TokenKind::IntLit(123),
            TokenKind::FloatLit(3.14),
            TokenKind::BoundedLit(255),
            TokenKind::StringLit("hello world".to_string()),
            TokenKind::Eof,
        ];

        for kind in expected {
            let token = lexer.next_token();
            assert_eq!(token.kind, kind);
        }
    }

    // `//` comments are skipped through the end of the line.
    #[test]
    fn test_lex_comments() {
        let source = "let x = 10; // this is a comment\nlet y = 20;";
        let mut lexer = Lexer::new(source, FileId(0));

        let expected = vec![
            TokenKind::Let,
            TokenKind::Identifier("x".to_string()),
            TokenKind::Assign,
            TokenKind::IntLit(10),
            TokenKind::Semicolon,
            TokenKind::Let,
            TokenKind::Identifier("y".to_string()),
            TokenKind::Assign,
            TokenKind::IntLit(20),
            TokenKind::Semicolon,
            TokenKind::Eof,
        ];

        for kind in expected {
            let token = lexer.next_token();
            assert_eq!(token.kind, kind);
        }
    }

    // Spans are half-open byte ranges [start, end) into the source.
    #[test]
    fn test_lex_spans() {
        let source = "let x = 10;";
        let mut lexer = Lexer::new(source, FileId(0));

        let t1 = lexer.next_token(); // let
        assert_eq!(t1.span.start, 0);
        assert_eq!(t1.span.end, 3);

        let t2 = lexer.next_token(); // x
        assert_eq!(t2.span.start, 4);
        assert_eq!(t2.span.end, 5);

        let t3 = lexer.next_token(); // =
        assert_eq!(t3.span.start, 6);
        assert_eq!(t3.span.end, 7);

        let t4 = lexer.next_token(); // 10
        assert_eq!(t4.span.start, 8);
        assert_eq!(t4.span.end, 10);

        let t5 = lexer.next_token(); // ;
        assert_eq!(t5.span.start, 10);
        assert_eq!(t5.span.end, 11);
    }

    // Characters outside the alphabet become Invalid tokens, then Eof.
    #[test]
    fn test_lex_invalid_tokens() {
        let source = "@ #";
        let mut lexer = Lexer::new(source, FileId(0));

        assert!(matches!(lexer.next_token().kind, TokenKind::Invalid(_)));
        assert!(matches!(lexer.next_token().kind, TokenKind::Invalid(_)));
        assert_eq!(lexer.next_token().kind, TokenKind::Eof);
    }

    // A string cut off by EOF is reported as Invalid.
    #[test]
    fn test_lex_unterminated_string() {
        let source = "\"hello";
        let mut lexer = Lexer::new(source, FileId(0));

        assert!(matches!(lexer.next_token().kind, TokenKind::Invalid(_)));
    }
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,108 +0,0 @@
|
|||||||
pub mod token;
|
|
||||||
pub mod lexer;
|
|
||||||
pub mod ast;
|
|
||||||
pub mod parser;
|
|
||||||
pub mod types;
|
|
||||||
pub mod symbols;
|
|
||||||
pub mod collector;
|
|
||||||
pub mod resolver;
|
|
||||||
pub mod resolve;
|
|
||||||
pub mod typecheck;
|
|
||||||
pub mod lowering;
|
|
||||||
pub mod contracts;
|
|
||||||
pub mod frontend;
|
|
||||||
pub mod adapter;
|
|
||||||
|
|
||||||
pub use collector::SymbolCollector;
|
|
||||||
pub use lexer::Lexer;
|
|
||||||
pub use lowering::Lowerer;
|
|
||||||
pub use resolver::{ModuleProvider, Resolver};
|
|
||||||
pub use symbols::{ModuleSymbols, Namespace, Symbol, SymbolKind, SymbolTable, Visibility};
|
|
||||||
pub use token::{Token, TokenKind};
|
|
||||||
pub use typecheck::TypeChecker;
|
|
||||||
pub use frontend::build_typed_module_symbols;
|
|
||||||
|
|
||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::common::files::FileManager;
|
|
||||||
use crate::frontends::Frontend;
|
|
||||||
use crate::ir_lang;
|
|
||||||
use crate::lowering::core_to_vm;
|
|
||||||
use prometeu_analysis::NameInterner;
|
|
||||||
use crate::common::spans::FileId;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
/// Frontend for the PBS language: parses, resolves, type-checks and lowers
/// a single source file down to a VM IR module.
pub struct PbsFrontend;

impl Frontend for PbsFrontend {
    /// Language tag used to select this frontend.
    fn language(&self) -> &'static str {
        "pbs"
    }

    /// Runs the full single-file pipeline:
    /// read -> parse -> collect symbols -> resolve -> type-check ->
    /// lower to Core IR -> validate invariants -> lower to VM IR.
    ///
    /// Any failure is converted into a `DiagnosticBundle`. I/O, invariant
    /// and lowering errors are reported with an invalid (fileless) span
    /// because no source location applies to them.
    fn compile_to_ir(
        &self,
        entry: &Path,
        file_manager: &mut FileManager,
    ) -> Result<ir_lang::Module, DiagnosticBundle> {
        // Read the entry file; I/O failures become E_FRONTEND_IO diagnostics.
        let source = std::fs::read_to_string(entry).map_err(|e| {
            crate::common::diagnostics::DiagnosticBundle::error(
                "E_FRONTEND_IO",
                format!("Failed to read file: {}", e),
                crate::common::spans::Span::new(FileId::INVALID, 0, 0),
            )
        })?;
        // Register the file so spans emitted downstream can be mapped back.
        let file_id = file_manager.add(entry.to_path_buf(), source.clone());

        // Parse into an AST arena; names are interned in a local interner.
        let mut interner = NameInterner::new();
        let mut parser = parser::Parser::new(&source, FileId(file_id as u32), &mut interner);
        let parsed = parser.parse_file()?;

        // Collect top-level type/value symbols from the parsed file.
        let mut collector = SymbolCollector::new(&interner);
        let (type_symbols, value_symbols) =
            collector.collect(&parsed.arena, parsed.root)?;
        let mut module_symbols = ModuleSymbols { type_symbols, value_symbols };

        // Single-file compilation: no external modules are available, so
        // every cross-module lookup resolves to None.
        struct EmptyProvider;
        impl ModuleProvider for EmptyProvider {
            fn get_module_symbols(&self, _path: &str) -> Option<&ModuleSymbols> { None }
        }

        let mut resolver = Resolver::new(&module_symbols, &EmptyProvider, &interner);
        // Step 3: Resolve and type expressions
        // Ensure primitives are interned in the shared FileManager interner
        // before bootstrapping, since bootstrap_types reads from it.
        {
            let inter = file_manager.interner_mut();
            let primitives = ["int", "bool", "float", "string", "bounded", "void"];
            for p in primitives { inter.intern(p); }
        }
        let inter_ro = file_manager.interner();
        resolver.bootstrap_types(inter_ro);
        resolver.resolve(&parsed.arena, parsed.root)?;
        let imported_symbols = resolver.imported_symbols;

        // Type-check against the collected and imported symbols.
        let mut typechecker = TypeChecker::new(&mut module_symbols, &imported_symbols, &EmptyProvider, &interner);
        typechecker.check(&parsed.arena, parsed.root)?;

        // Lower to Core IR
        let lowerer = Lowerer::new(&parsed.arena, &module_symbols, &imported_symbols, &EmptyProvider, &interner);
        // NOTE(review): `file_stem().unwrap()` panics when `entry` has no
        // stem (e.g. ".."); consider reporting a diagnostic instead.
        let module_name = entry.file_stem().unwrap().to_string_lossy();
        let core_program = lowerer.lower_file(parsed.root, &module_name)?;

        // Validate Core IR Invariants
        crate::ir_core::validate_program(&core_program).map_err(|e| {
            crate::common::diagnostics::DiagnosticBundle::error(
                "E_CORE_INVARIANT",
                format!("Core IR Invariant Violation: {}", e),
                crate::common::spans::Span::new(FileId::INVALID, 0, 0),
            )
        })?;

        // Lower Core IR to VM IR
        core_to_vm::lower_program(&core_program).map_err(|e| {
            crate::common::diagnostics::DiagnosticBundle::error(
                "E_LOWERING",
                format!("Lowering error: {}", e),
                crate::common::spans::Span::new(FileId::INVALID, 0, 0),
            )
        })
    }
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,568 +0,0 @@
|
|||||||
use crate::common::diagnostics::{Diagnostic, Severity};
|
|
||||||
use crate::frontends::pbs::ast::{AstArena, NodeKind};
|
|
||||||
use crate::analysis::symbols::{Symbol, SymbolArena, SymbolKind, DefIndex, DefKey, Namespace, NodeToSymbol, RefIndex};
|
|
||||||
use prometeu_analysis::{NameInterner, NodeId, ModuleId};
|
|
||||||
|
|
||||||
/// Builds the definition and reference indices for one module.
///
/// Two passes over `arena.roots`:
/// 1. collect top-level declarations (functions, types, services) into a
///    `SymbolArena` + `DefIndex`, binding each declaration node to its symbol;
/// 2. walk every node (`walk_node`) and resolve identifier references
///    against the local index and, when given, the `imports` pair
///    (symbols/index of other modules).
///
/// Diagnostics (duplicate definitions, undefined identifiers, visibility
/// violations) are accumulated rather than returned early, so callers
/// always receive the complete index alongside the error list.
pub fn build_def_index(
    arena: &AstArena,
    module: ModuleId,
    _interner: &NameInterner,
    imports: Option<(&SymbolArena, &DefIndex)>,
) -> (SymbolArena, DefIndex, RefIndex, NodeToSymbol, Vec<Diagnostic>) {
    let mut symbols = SymbolArena::new();
    let mut index = DefIndex::new();
    let mut ref_index = RefIndex::new();
    let mut node_to_symbol = NodeToSymbol::new();
    let mut diagnostics = Vec::new();

    // Pass 1: collect top-level definitions.
    for &root_id in &arena.roots {
        let root_kind = arena.kind(root_id);
        if let NodeKind::File(file_node) = root_kind {
            for &decl_id in &file_node.decls {
                let decl_kind = arena.kind(decl_id);
                let span = arena.span(decl_id);

                // Map each declaration to (name, kind, namespace, exported?);
                // non-declaration nodes contribute no symbol.
                let result = match decl_kind {
                    NodeKind::FnDecl(fn_decl) => {
                        Some((fn_decl.name, SymbolKind::Function, Namespace::Value, fn_decl.vis.as_deref() == Some("pub")))
                    }
                    NodeKind::TypeDecl(type_decl) => {
                        let kind = match type_decl.type_kind.as_str() {
                            "struct" => SymbolKind::Struct,
                            "contract" => SymbolKind::Contract,
                            "error" => SymbolKind::ErrorType,
                            _ => SymbolKind::Type,
                        };
                        Some((type_decl.name, kind, Namespace::Type, type_decl.vis.as_deref() == Some("pub")))
                    }
                    NodeKind::ServiceDecl(service_decl) => {
                        Some((service_decl.name, SymbolKind::Service, Namespace::Service, service_decl.vis.as_deref() == Some("pub")))
                    }
                    _ => None,
                };

                if let Some((name, kind, namespace, exported)) = result {
                    let symbol = Symbol {
                        name,
                        kind,
                        exported,
                        module,
                        decl_span: span.clone(),
                    };

                    let symbol_id = symbols.insert(symbol);
                    // Bind the declaration node to its symbol for later lookup.
                    node_to_symbol.bind_node(decl_id, symbol_id);
                    let key = DefKey {
                        module,
                        name,
                        namespace,
                    };

                    // Duplicate definition: the index keeps the first entry;
                    // the rejection diagnostic is re-pointed at the span of
                    // the *second* (offending) declaration.
                    if let Err(mut diag) = index.insert_symbol(key, symbol_id) {
                        diag.span = span;
                        diagnostics.push(diag);
                    }
                }
            }
        }
    }

    // Pass 2: resolve references (identifiers).
    for &root_id in &arena.roots {
        walk_node(root_id, arena, module, &index, imports, &mut ref_index, &mut node_to_symbol, &mut diagnostics);
    }

    (symbols, index, ref_index, node_to_symbol, diagnostics)
}
|
|
||||||
|
|
||||||
/// Recursively resolves identifier references under `node_id`.
///
/// Structural nodes simply recurse into their children. `Ident` nodes are
/// resolved against the local `index` first and, failing that, against the
/// optional `imports` (symbol arena + index of other modules). Successful
/// lookups are recorded in `ref_index` and bound in `node_to_symbol`;
/// misses and visibility violations are pushed onto `diagnostics` —
/// resolution never aborts early.
fn walk_node(
    node_id: NodeId,
    arena: &AstArena,
    module: ModuleId,
    index: &DefIndex,
    imports: Option<(&SymbolArena, &DefIndex)>,
    ref_index: &mut RefIndex,
    node_to_symbol: &mut NodeToSymbol,
    diagnostics: &mut Vec<Diagnostic>,
) {
    let kind = arena.kind(node_id);
    let span = arena.span(node_id);

    match kind {
        NodeKind::File(file) => {
            for &decl in &file.decls {
                walk_node(decl, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::FnDecl(fn_decl) => {
            walk_node(fn_decl.body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::ServiceFnDecl(_decl) => {
            // Bodies/locals are not resolved in this phase (handled in resolver.rs).
        }
        NodeKind::Block(block) => {
            for &stmt in &block.stmts {
                walk_node(stmt, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
            // The trailing expression (block value), when present.
            if let Some(tail) = block.tail {
                walk_node(tail, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::LetStmt(let_stmt) => {
            walk_node(let_stmt.init, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::ExprStmt(expr_stmt) => {
            walk_node(expr_stmt.expr, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::ReturnStmt(ret) => {
            if let Some(expr) = ret.expr {
                walk_node(expr, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::Ident(ident) => {
            // Identifiers are looked up in the Value namespace of the
            // current module first.
            let key = DefKey {
                module,
                name: ident.name,
                namespace: Namespace::Value,
            };

            if let Some(symbol_id) = index.get(key) {
                ref_index.record_ref(symbol_id, span.clone());
                node_to_symbol.bind_node(node_id, symbol_id);
            } else if let Some((import_arena, import_index)) = imports {
                // Fallback: search the import set across all modules.
                if let Some((_, symbol_id)) = import_index.get_by_name_any_module(ident.name, Namespace::Value) {
                    let symbol = import_arena.get(symbol_id);
                    // Cross-module use of a non-exported symbol is an error.
                    if !symbol.exported && symbol.module != module {
                        diagnostics.push(Diagnostic {
                            severity: Severity::Error,
                            code: "E_RESOLVE_VISIBILITY".to_string(),
                            message: format!("Symbol is not exported from module {:?}", symbol.module),
                            span: span.clone(),
                            related: Vec::new(),
                        });
                    }
                    // NOTE(review): the reference is recorded and the node is
                    // bound even when the visibility check failed above —
                    // presumably so navigation/tooling keeps working; confirm
                    // this is intentional.
                    ref_index.record_ref(symbol_id, span.clone());
                    node_to_symbol.bind_node(node_id, symbol_id);
                } else {
                    diagnostics.push(Diagnostic {
                        severity: Severity::Error,
                        code: "E_RESOLVE_UNDEFINED_IDENTIFIER".to_string(),
                        message: "Undefined identifier".to_string(),
                        span: span.clone(),
                        related: Vec::new(),
                    });
                }
            } else {
                // No local definition and no import set supplied.
                diagnostics.push(Diagnostic {
                    severity: Severity::Error,
                    code: "E_RESOLVE_UNDEFINED_IDENTIFIER".to_string(),
                    message: "Undefined identifier".to_string(),
                    span: span.clone(),
                    related: Vec::new(),
                });
            }
        }
        NodeKind::Call(call) => {
            walk_node(call.callee, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            for &arg in &call.args {
                walk_node(arg, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::Unary(unary) => {
            walk_node(unary.expr, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Binary(binary) => {
            walk_node(binary.left, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(binary.right, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Cast(cast) => {
            walk_node(cast.expr, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::IfExpr(if_expr) => {
            walk_node(if_expr.cond, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(if_expr.then_block, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            if let Some(else_block) = if_expr.else_block {
                walk_node(else_block, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::WhenExpr(when_expr) => {
            for &arm in &when_expr.arms {
                walk_node(arm, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::WhenArm(when_arm) => {
            walk_node(when_arm.cond, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(when_arm.body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Alloc(alloc) => {
            walk_node(alloc.ty, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Mutate(mutate) => {
            walk_node(mutate.target, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(mutate.body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Borrow(borrow) => {
            walk_node(borrow.target, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(borrow.body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::Peek(peek) => {
            walk_node(peek.target, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            walk_node(peek.body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::MemberAccess(member) => {
            // Only the receiver is resolved here; the member name itself is
            // not looked up in this pass.
            walk_node(member.object, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
        }
        NodeKind::TypeDecl(type_decl) => {
            if let Some(body) = type_decl.body {
                walk_node(body, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::TypeBody(body) => {
            for &method in &body.methods {
                walk_node(method, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        NodeKind::ServiceDecl(service) => {
            for &member in &service.members {
                walk_node(member, arena, module, index, imports, ref_index, node_to_symbol, diagnostics);
            }
        }
        // Literal nodes or those that do not contain other nodes to walk (at this level)
        NodeKind::IntLit(_) | NodeKind::FloatLit(_) | NodeKind::BoundedLit(_) | NodeKind::StringLit(_) |
        NodeKind::TypeName(_) | NodeKind::TypeApp(_) | NodeKind::Import(_) | NodeKind::ImportSpec(_) |
        NodeKind::ServiceFnSig(_) | NodeKind::ConstructorDecl(_) | NodeKind::ConstantDecl(_) => {}
    }
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::frontends::pbs::ast::{
        AstArena, BlockNodeArena, CallNodeArena, ExprStmtNodeArena, FileNodeArena, FnDeclNodeArena,
        IdentNodeArena, TypeDeclNodeArena,
    };
    use crate::common::spans::{Span, FileId};

    /// Collects one public fn and one private struct; verifies kinds,
    /// namespaces and export flags land correctly in the index.
    #[test]
    fn test_build_def_index_success() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        // Create a dummy body for the function to avoid panic/invalid access
        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 0, 0));

        let fn_name = interner.intern("my_func");
        let fn_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: Some("pub".to_string()),
            name: fn_name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 10, 20));

        let type_name = interner.intern("MyStruct");
        let type_id = arena.push(NodeKind::TypeDecl(TypeDeclNodeArena {
            vis: None,
            type_kind: "struct".to_string(),
            name: type_name,
            is_host: false,
            params: vec![],
            constructors: vec![],
            constants: vec![],
            body: None,
        }), Span::new(FileId(0), 30, 40));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![fn_id, type_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let (symbols, index, _ref_index, _node_to_symbol, diagnostics) = build_def_index(&arena, ModuleId(1), &interner, None);

        assert!(diagnostics.is_empty());
        assert_eq!(symbols.symbols.len(), 2);

        let fn_sym_id = index.get(DefKey { module: ModuleId(1), name: fn_name, namespace: Namespace::Value }).unwrap();
        let fn_sym = symbols.get(fn_sym_id);
        assert_eq!(fn_sym.name, fn_name);
        assert_eq!(fn_sym.kind, SymbolKind::Function);
        assert!(fn_sym.exported);

        let type_sym_id = index.get(DefKey { module: ModuleId(1), name: type_name, namespace: Namespace::Type }).unwrap();
        let type_sym = symbols.get(type_sym_id);
        assert_eq!(type_sym.name, type_name);
        assert_eq!(type_sym.kind, SymbolKind::Struct);
        // No "pub" visibility => not exported.
        assert!(!type_sym.exported);
    }

    /// Two fns with the same name must yield exactly one duplicate-symbol
    /// diagnostic, reported at the second declaration's span.
    #[test]
    fn test_build_def_index_duplicate() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        let name = interner.intern("conflict");

        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 0, 0));

        let fn1_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 10, 20));

        let fn2_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 30, 40));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![fn1_id, fn2_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let (_symbols, _index, _ref_index, _node_to_symbol, diagnostics) = build_def_index(&arena, ModuleId(1), &interner, None);

        assert_eq!(diagnostics.len(), 1);
        assert_eq!(diagnostics[0].code, "E_RESOLVE_DUPLICATE_SYMBOL");
        // The error points at the *second* declaration.
        assert_eq!(diagnostics[0].span, Span::new(FileId(0), 30, 40));
    }

    /// A declaration node must be bound to the symbol created for it.
    #[test]
    fn test_node_to_symbol_binding() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        let name = interner.intern("bound_func");
        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 0, 0));
        let decl_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 10, 20));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![decl_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let (symbols, _index, _ref_index, node_to_symbol, _diagnostics) = build_def_index(&arena, ModuleId(1), &interner, None);

        let symbol_id = node_to_symbol.get(decl_id).expect("Node should be bound to a symbol");
        let symbol = symbols.get(symbol_id);
        assert_eq!(symbol.name, name);
    }

    /// An identifier with no matching definition produces an
    /// undefined-identifier diagnostic at the identifier's own span.
    #[test]
    fn test_resolve_undefined_identifier() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        let ident_name = interner.intern("unknown");
        let ident_id = arena.push(NodeKind::Ident(IdentNodeArena { name: ident_name }), Span::new(FileId(0), 50, 60));
        let expr_stmt = arena.push(NodeKind::ExprStmt(ExprStmtNodeArena { expr: ident_id }), Span::new(FileId(0), 50, 60));
        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![expr_stmt], tail: None }), Span::new(FileId(0), 40, 70));

        let fn_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name: interner.intern("main"),
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 10, 80));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![fn_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let (_symbols, _index, _ref_index, _node_to_symbol, diagnostics) = build_def_index(&arena, ModuleId(1), &interner, None);

        assert_eq!(diagnostics.len(), 1);
        assert_eq!(diagnostics[0].code, "E_RESOLVE_UNDEFINED_IDENTIFIER");
        assert_eq!(diagnostics[0].span, Span::new(FileId(0), 50, 60));
    }

    /// `caller` references `target`; the reference must be recorded at the
    /// identifier span and the ident node bound to target's symbol.
    #[test]
    fn test_resolve_reference_success() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        // fn target() {}
        let target_name = interner.intern("target");
        let target_body = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 5, 5));
        let target_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name: target_name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: target_body,
        }), Span::new(FileId(0), 0, 10));

        // fn caller() { target(); }
        let ident_id = arena.push(NodeKind::Ident(IdentNodeArena { name: target_name }), Span::new(FileId(0), 50, 56));
        let call_id = arena.push(NodeKind::Call(CallNodeArena { callee: ident_id, args: vec![] }), Span::new(FileId(0), 50, 58));
        let expr_stmt = arena.push(NodeKind::ExprStmt(ExprStmtNodeArena { expr: call_id }), Span::new(FileId(0), 50, 58));
        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![expr_stmt], tail: None }), Span::new(FileId(0), 40, 70));

        let caller_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name: interner.intern("caller"),
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 30, 80));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![target_id, caller_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let (_symbols, index, ref_index, node_to_symbol, diagnostics) = build_def_index(&arena, ModuleId(1), &interner, None);

        assert!(diagnostics.is_empty(), "Diagnostics should be empty: {:?}", diagnostics);

        let target_sym_id = index.get(DefKey { module: ModuleId(1), name: target_name, namespace: Namespace::Value }).expect("target should be in index");
        // Exactly one reference, recorded at the identifier span (not the call span).
        let refs = ref_index.refs_of(target_sym_id);
        assert_eq!(refs.len(), 1);
        assert_eq!(refs[0], Span::new(FileId(0), 50, 56));

        let bound_id = node_to_symbol.get(ident_id).expect("ident should be bound to symbol");
        assert_eq!(bound_id, target_sym_id);
    }

    /// Module 2 references a private fn of module 1 via the import set:
    /// expect exactly one visibility diagnostic.
    #[test]
    fn test_resolve_visibility_violation() {
        let mut interner = NameInterner::new();

        // Module 1: defines a private function
        let mut arena1 = AstArena::default();
        let target_name = interner.intern("private_func");
        let body1 = arena1.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 0, 0));
        let decl1 = arena1.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None, // private
            name: target_name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body1,
        }), Span::new(FileId(0), 0, 10));
        let file1 = arena1.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![decl1],
        }), Span::new(FileId(0), 0, 100));
        arena1.roots.push(file1);

        let (symbols1, index1, _, _, _) = build_def_index(&arena1, ModuleId(1), &interner, None);

        // Module 2: tries to use module 1's private function
        let mut arena2 = AstArena::default();
        let ident_id = arena2.push(NodeKind::Ident(IdentNodeArena { name: target_name }), Span::new(FileId(0), 50, 62));
        let expr_stmt = arena2.push(NodeKind::ExprStmt(ExprStmtNodeArena { expr: ident_id }), Span::new(FileId(0), 50, 62));
        let body2 = arena2.push(NodeKind::Block(BlockNodeArena { stmts: vec![expr_stmt], tail: None }), Span::new(FileId(0), 40, 70));
        let caller = arena2.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: Some("pub".to_string()),
            name: interner.intern("caller"),
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body2,
        }), Span::new(FileId(0), 30, 80));
        let file2 = arena2.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![caller],
        }), Span::new(FileId(0), 0, 100));
        arena2.roots.push(file2);

        let (_symbols2, _index2, _ref_index2, _node_to_symbol2, diagnostics) =
            build_def_index(&arena2, ModuleId(2), &interner, Some((&symbols1, &index1)));

        assert_eq!(diagnostics.len(), 1);
        assert_eq!(diagnostics[0].code, "E_RESOLVE_VISIBILITY");
        assert_eq!(diagnostics[0].span, Span::new(FileId(0), 50, 62));
    }

    /// Running the indexer twice over the same AST must serialize to
    /// byte-identical JSON for every output component.
    #[test]
    fn test_determinism() {
        let mut arena = AstArena::default();
        let mut interner = NameInterner::new();

        let target_name = interner.intern("target");
        let target_body = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![], tail: None }), Span::new(FileId(0), 5, 5));
        let target_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: Some("pub".to_string()),
            name: target_name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: target_body,
        }), Span::new(FileId(0), 0, 10));

        let caller_name = interner.intern("caller");
        let ident_id = arena.push(NodeKind::Ident(IdentNodeArena { name: target_name }), Span::new(FileId(0), 50, 56));
        let call_id = arena.push(NodeKind::Call(CallNodeArena { callee: ident_id, args: vec![] }), Span::new(FileId(0), 50, 58));
        let expr_stmt = arena.push(NodeKind::ExprStmt(ExprStmtNodeArena { expr: call_id }), Span::new(FileId(0), 50, 58));
        let body_id = arena.push(NodeKind::Block(BlockNodeArena { stmts: vec![expr_stmt], tail: None }), Span::new(FileId(0), 40, 70));

        let caller_id = arena.push(NodeKind::FnDecl(FnDeclNodeArena {
            vis: None,
            name: caller_name,
            params: vec![],
            ret: None,
            else_fallback: None,
            body: body_id,
        }), Span::new(FileId(0), 30, 80));

        let file_id = arena.push(NodeKind::File(FileNodeArena {
            imports: vec![],
            decls: vec![target_id, caller_id],
        }), Span::new(FileId(0), 0, 100));

        arena.roots.push(file_id);

        let run1 = build_def_index(&arena, ModuleId(1), &interner, None);
        let run2 = build_def_index(&arena, ModuleId(1), &interner, None);

        // runX is (SymbolArena, DefIndex, RefIndex, NodeToSymbol, Vec<Diagnostic>)

        let json1_symbols = serde_json::to_string(&run1.0).unwrap();
        let json2_symbols = serde_json::to_string(&run2.0).unwrap();
        assert_eq!(json1_symbols, json2_symbols, "SymbolArena should be deterministic");

        let json1_refs = serde_json::to_string(&run1.2).unwrap();
        let json2_refs = serde_json::to_string(&run2.2).unwrap();
        assert_eq!(json1_refs, json2_refs, "RefIndex should be deterministic");

        let json1_node_to_symbol = serde_json::to_string(&run1.3).unwrap();
        let json2_node_to_symbol = serde_json::to_string(&run2.3).unwrap();
        assert_eq!(json1_node_to_symbol, json2_node_to_symbol, "NodeToSymbol should be deterministic");

        let json1_diags = serde_json::to_string(&run1.4).unwrap();
        let json2_diags = serde_json::to_string(&run2.4).unwrap();
        assert_eq!(json1_diags, json2_diags, "Diagnostics should be deterministic");
    }
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,101 +0,0 @@
|
|||||||
use crate::common::spans::Span;
|
|
||||||
use crate::frontends::pbs::types::PbsType;
|
|
||||||
use prometeu_analysis::NameId;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// Visibility level attached to a symbol.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Visibility {
    /// Visible only within the defining file.
    FilePrivate,
    /// Module-level visibility.
    Mod,
    /// Publicly visible.
    Pub,
}
|
|
||||||
|
|
||||||
/// Kind of entity a symbol refers to.
///
/// `PartialOrd`/`Ord` are derived so kinds can be compared and sorted
/// (declaration order of the variants defines the ordering).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord)]
pub enum SymbolKind {
    Function,
    Service,
    Struct,
    Contract,
    ErrorType,
    /// A local binding, as opposed to a top-level item.
    Local,
}
|
|
||||||
|
|
||||||
/// Namespace a symbol is registered under; types and values are
/// looked up independently, so a type and a value may share a name.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Namespace {
    Type,
    Value,
}
|
|
||||||
|
|
||||||
/// A single named entity known to the resolver.
#[derive(Debug, Clone)]
pub struct Symbol {
    // Interned identifier of the symbol.
    pub name: NameId,
    // What kind of entity this is (function, struct, local, ...).
    pub kind: SymbolKind,
    // Whether the name lives in the Type or Value namespace.
    pub namespace: Namespace,
    // How widely the symbol is visible.
    pub visibility: Visibility,
    // Resolved type, when known; None until/unless type-checking fills it in.
    pub ty: Option<PbsType>,
    // True for host-provided (native) symbols.
    pub is_host: bool,
    // Source span of the declaration.
    pub span: Span,
    pub origin: Option<String>, // e.g. "@sdk:gfx" or "./other"
}
|
|
||||||
|
|
||||||
/// Per-namespace table mapping a name to one or more symbols.
#[derive(Debug, Clone)]
pub struct SymbolTable {
    // Allow multiple entries per name for overloaded functions (Value namespace only).
    // For Type namespace, multiple entries are rejected by `insert`.
    pub symbols: HashMap<NameId, Vec<Symbol>>,
}
|
|
||||||
|
|
||||||
/// All symbols of one module, split by namespace.
#[derive(Debug, Clone)]
pub struct ModuleSymbols {
    // Structs, contracts, error types, etc.
    pub type_symbols: SymbolTable,
    // Functions and other value-level bindings.
    pub value_symbols: SymbolTable,
}
|
|
||||||
|
|
||||||
impl ModuleSymbols {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
type_symbols: SymbolTable::new(),
|
|
||||||
value_symbols: SymbolTable::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SymbolTable {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
symbols: HashMap::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert(&mut self, symbol: Symbol) -> Result<(), ()> {
|
|
||||||
match self.symbols.get_mut(&symbol.name) {
|
|
||||||
Some(list) => {
|
|
||||||
// If an entry already exists:
|
|
||||||
// - Allow multiple Function symbols (overloads)
|
|
||||||
// - Reject duplicates for any other kind
|
|
||||||
let all_funcs = list.iter().all(|s| s.kind == SymbolKind::Function);
|
|
||||||
if symbol.kind == SymbolKind::Function && all_funcs {
|
|
||||||
list.push(symbol);
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
self.symbols.insert(symbol.name, vec![symbol]);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the first symbol for this name (primarily for non-overloaded types/services).
|
|
||||||
pub fn get(&self, name: NameId) -> Option<&Symbol> {
|
|
||||||
self.symbols.get(&name).and_then(|v| v.first())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns all symbols (e.g., all function overloads) registered under this name.
|
|
||||||
pub fn get_all(&self, name: NameId) -> Option<&[Symbol]> {
|
|
||||||
self.symbols.get(&name).map(|v| v.as_slice())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,95 +0,0 @@
|
|||||||
use crate::common::spans::Span;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// The kind of a lexical token, together with any literal payload.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum TokenKind {
    // Keywords
    Import,
    Pub,
    Mod,
    Service,
    Fn,
    Let,
    Mut,
    Declare,
    Struct,
    Contract,
    Host,
    Error,
    Optional,
    Result,
    Some,
    None,
    Ok,
    Err,
    If,
    Else,
    When,
    For,
    In,
    Return,
    Handle,
    Borrow,
    Mutate,
    Peek,
    Take,
    Alloc,
    Weak,
    As,
    Bounded,

    // Identifiers and Literals (the payload carries the scanned value)
    Identifier(String),
    IntLit(i64),
    FloatLit(f64),
    BoundedLit(u32),
    StringLit(String),

    // Punctuation
    OpenParen,          // (
    CloseParen,         // )
    OpenBrace,          // {
    CloseBrace,         // }
    OpenBracket,        // [
    CloseBracket,       // ]
    OpenDoubleBracket,  // [[
    CloseDoubleBracket, // ]]
    Comma,              // ,
    Dot,                // .
    Colon,              // :
    Semicolon,          // ;
    Arrow,              // ->

    // Operators
    Assign,  // =
    Plus,    // +
    Minus,   // -
    Star,    // *
    Slash,   // /
    Percent, // %
    Eq,      // ==
    Neq,     // !=
    Lt,      // <
    Gt,      // >
    Lte,     // <=
    Gte,     // >=
    And,     // &&
    Or,      // ||
    Not,     // !

    // Special
    /// End of input.
    Eof,
    /// Unrecognized input, preserved verbatim for diagnostics.
    Invalid(String),
}
|
|
||||||
|
|
||||||
/// A lexed token: its kind plus the source span it was scanned from.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}
|
|
||||||
|
|
||||||
impl Token {
|
|
||||||
pub fn new(kind: TokenKind, span: Span) -> Self {
|
|
||||||
Self { kind, span }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@ -1,53 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
/// Surface-language semantic type, printed in source syntax by the `Display` impl.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum PbsType {
    Int,
    Float,
    Bool,
    String,
    Void,
    None,
    Bounded,
    /// `optional<T>`.
    Optional(Box<PbsType>),
    /// `result<Ok, Err>`.
    Result(Box<PbsType>, Box<PbsType>),
    // Nominal user-defined types, identified by name.
    Struct(String),
    Service(String),
    Contract(String),
    ErrorType(String),
    /// A function type: parameter types and a return type.
    Function {
        params: Vec<PbsType>,
        return_type: Box<PbsType>,
    },
}
|
|
||||||
|
|
||||||
impl fmt::Display for PbsType {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
PbsType::Int => write!(f, "int"),
|
|
||||||
PbsType::Float => write!(f, "float"),
|
|
||||||
PbsType::Bool => write!(f, "bool"),
|
|
||||||
PbsType::String => write!(f, "string"),
|
|
||||||
PbsType::Void => write!(f, "void"),
|
|
||||||
PbsType::None => write!(f, "none"),
|
|
||||||
PbsType::Bounded => write!(f, "bounded"),
|
|
||||||
PbsType::Optional(inner) => write!(f, "optional<{}>", inner),
|
|
||||||
PbsType::Result(ok, err) => write!(f, "result<{}, {}>", ok, err),
|
|
||||||
PbsType::Struct(name) => write!(f, "{}", name),
|
|
||||||
PbsType::Service(name) => write!(f, "{}", name),
|
|
||||||
PbsType::Contract(name) => write!(f, "{}", name),
|
|
||||||
PbsType::ErrorType(name) => write!(f, "{}", name),
|
|
||||||
PbsType::Function { params, return_type } => {
|
|
||||||
write!(f, "fn(")?;
|
|
||||||
for (i, param) in params.iter().enumerate() {
|
|
||||||
if i > 0 {
|
|
||||||
write!(f, ", ")?;
|
|
||||||
}
|
|
||||||
write!(f, "{}", param)?;
|
|
||||||
}
|
|
||||||
write!(f, ") -> {}", return_type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
use super::instr::Instr;
|
|
||||||
use super::terminator::Terminator;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// A basic block in a function's control flow graph.
/// Contains a sequence of instructions and ends with a terminator.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Block {
    /// Block id; jump terminators reference blocks by this number.
    pub id: u32,
    pub instrs: Vec<Instr>,
    /// The single exit of the block (return or jump).
    pub terminator: Terminator,
}
|
|
||||||
@ -1,98 +0,0 @@
|
|||||||
use super::ids::ConstId;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Represents a constant value that can be stored in the constant pool.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ConstantValue {
    Int(i64),
    Float(f64),
    String(String),
}
|
|
||||||
|
|
||||||
/// A stable constant pool that handles deduplication and provides IDs.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
pub struct ConstPool {
    /// Backing storage; a value's index here is its `ConstId`.
    pub constants: Vec<ConstantValue>,
}
|
|
||||||
|
|
||||||
impl ConstPool {
|
|
||||||
/// Creates a new, empty constant pool.
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Inserts a value into the pool if it doesn't already exist.
|
|
||||||
/// Returns the corresponding `ConstId`.
|
|
||||||
pub fn insert(&mut self, value: ConstantValue) -> ConstId {
|
|
||||||
if let Some(pos) = self.constants.iter().position(|c| c == &value) {
|
|
||||||
ConstId(pos as u32)
|
|
||||||
} else {
|
|
||||||
let id = self.constants.len() as u32;
|
|
||||||
self.constants.push(value);
|
|
||||||
ConstId(id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Retrieves a value from the pool by its `ConstId`.
|
|
||||||
pub fn get(&self, id: ConstId) -> Option<&ConstantValue> {
|
|
||||||
self.constants.get(id.0 as usize)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_int(&mut self, value: i64) -> ConstId {
|
|
||||||
self.insert(ConstantValue::Int(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_float(&mut self, value: f64) -> ConstId {
|
|
||||||
self.insert(ConstantValue::Float(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_string(&mut self, value: String) -> ConstId {
|
|
||||||
self.insert(ConstantValue::String(value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir_core::ids::ConstId;

    // Equal values must intern to the same id; distinct values must not.
    #[test]
    fn test_const_pool_deduplication() {
        let mut pool = ConstPool::new();

        let id1 = pool.insert(ConstantValue::Int(42));
        let id2 = pool.insert(ConstantValue::String("hello".to_string()));
        let id3 = pool.insert(ConstantValue::Int(42));

        assert_eq!(id1, id3);
        assert_ne!(id1, id2);
        assert_eq!(pool.constants.len(), 2);
    }

    // Ids are assigned densely in insertion order (0, 1, 2, ...).
    #[test]
    fn test_const_pool_deterministic_assignment() {
        let mut pool = ConstPool::new();

        let id0 = pool.insert(ConstantValue::Int(10));
        let id1 = pool.insert(ConstantValue::Int(20));
        let id2 = pool.insert(ConstantValue::Int(30));

        assert_eq!(id0, ConstId(0));
        assert_eq!(id1, ConstId(1));
        assert_eq!(id2, ConstId(2));
    }

    // The serde encoding is externally tagged by variant name.
    #[test]
    fn test_const_pool_serialization() {
        let mut pool = ConstPool::new();
        pool.insert(ConstantValue::Int(42));
        pool.insert(ConstantValue::String("test".to_string()));
        pool.insert(ConstantValue::Float(3.14));

        let json = serde_json::to_string_pretty(&pool).unwrap();

        assert!(json.contains("\"Int\": 42"));
        assert!(json.contains("\"String\": \"test\""));
        assert!(json.contains("\"Float\": 3.14"));
    }
}
|
|
||||||
@ -1,30 +0,0 @@
|
|||||||
use super::block::Block;
|
|
||||||
use super::ids::{FunctionId, SigId};
|
|
||||||
use super::types::Type;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// A named, typed function parameter.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Param {
    pub name: String,
    pub ty: Type,
}
|
|
||||||
|
|
||||||
/// A function within a module, composed of basic blocks forming a CFG.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Function {
    pub id: FunctionId,
    pub name: String,
    // Interner-local signature id. NOTE(review): skipped from serialization,
    // presumably because SigId is only meaningful within one process's
    // global interner — confirm before relying on round-trips.
    #[serde(skip)]
    pub sig: SigId,
    pub params: Vec<Param>,
    pub return_type: Type,
    pub blocks: Vec<Block>,
    // Types of local slots; `default` keeps older serialized forms loadable.
    #[serde(default)]
    pub local_types: HashMap<u32, Type>,

    // Stack-frame slot counts.
    pub param_slots: u16,
    pub local_slots: u16,
    pub return_slots: u16,
}
|
|
||||||
@ -1,31 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
// All ids below are thin `u32` newtypes; `#[serde(transparent)]` makes each
// serialize as the bare integer (see `test_ir_core_ids` in the module tests).

/// Unique identifier for a function within a program.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct FunctionId(pub u32);

/// Unique identifier for a constant value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ConstId(pub u32);

/// Unique identifier for a type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct TypeId(pub u32);

/// Unique identifier for a function signature (params + return type).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Default)]
#[serde(transparent)]
pub struct SigId(pub u32);

/// Unique identifier for a value (usually a local slot).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ValueId(pub u32);

/// Unique identifier for a field within a HIP object.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct FieldId(pub u32);
|
|
||||||
@ -1,84 +0,0 @@
|
|||||||
use super::ids::{ConstId, FieldId, FunctionId, TypeId, ValueId, SigId};
|
|
||||||
use crate::common::spans::Span;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// One instruction: an operation plus an optional source span for diagnostics.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Instr {
    pub kind: InstrKind,
    /// `None` for synthesized instructions with no source location.
    pub span: Option<Span>,
}
|
|
||||||
|
|
||||||
impl Instr {
|
|
||||||
pub fn new(kind: InstrKind, span: Option<Span>) -> Self {
|
|
||||||
Self { kind, span }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Instructions within a basic block.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum InstrKind {
    /// Push a constant from the pool onto the stack.
    PushConst(ConstId),
    /// Push a bounded value (0..0xFFFF).
    PushBounded(u32),
    /// Direct call to a function in this program.
    /// NOTE(review): the `u32` is presumably the argument count, mirroring
    /// `ImportCall::arg_count` — confirm against the emitter.
    Call(FunctionId, u32),
    /// External calls (imports).
    /// Carries dependency alias, module path, optional owner (service), base function name,
    /// precise signature id, and arg count.
    ImportCall {
        dep_alias: String,
        module_path: String,
        owner: Option<String>,
        base_name: String,
        sig: SigId,
        arg_count: u32,
    },
    /// Host calls (syscalls). (id, return_slots)
    HostCall(u32, u32),
    /// Variable access by local slot index.
    GetLocal(u32),
    SetLocal(u32),
    /// Stack operations.
    Pop,
    Dup,
    /// Arithmetic.
    Add,
    Sub,
    Mul,
    Div,
    Neg,
    /// Logical/Comparison.
    Eq,
    Neq,
    Lt,
    Lte,
    Gt,
    Gte,
    And,
    Or,
    Not,
    /// HIP (heap) operations: allocate, then access through Begin*/End* gates.
    Alloc { ty: TypeId, slots: u32 },
    BeginPeek { gate: ValueId },
    BeginBorrow { gate: ValueId },
    BeginMutate { gate: ValueId },
    EndPeek,
    EndBorrow,
    EndMutate,
    /// Reads from heap at gate + field. Pops gate, pushes value.
    GateLoadField { gate: ValueId, field: FieldId },
    /// Writes to heap at gate + field. Pops gate and value.
    GateStoreField { gate: ValueId, field: FieldId, value: ValueId },
    /// Reads from heap at gate + index.
    GateLoadIndex { gate: ValueId, index: ValueId },
    /// Writes to heap at gate + index.
    GateStoreIndex { gate: ValueId, index: ValueId, value: ValueId },
    Free,
}
|
|
||||||
|
|
||||||
impl From<InstrKind> for Instr {
|
|
||||||
fn from(kind: InstrKind) -> Self {
|
|
||||||
Self::new(kind, None)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,128 +0,0 @@
|
|||||||
pub mod ids;
|
|
||||||
pub mod const_pool;
|
|
||||||
pub mod types;
|
|
||||||
pub mod program;
|
|
||||||
pub mod module;
|
|
||||||
pub mod function;
|
|
||||||
pub mod block;
|
|
||||||
pub mod instr;
|
|
||||||
pub mod terminator;
|
|
||||||
pub mod validate;
|
|
||||||
pub mod signature;
|
|
||||||
|
|
||||||
pub use block::*;
|
|
||||||
pub use const_pool::*;
|
|
||||||
pub use function::*;
|
|
||||||
pub use ids::*;
|
|
||||||
pub use instr::*;
|
|
||||||
pub use module::*;
|
|
||||||
pub use program::*;
|
|
||||||
pub use terminator::*;
|
|
||||||
pub use types::*;
|
|
||||||
pub use validate::*;
|
|
||||||
pub use signature::*;
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json;

    // End-to-end: build a tiny one-function program by hand and pin its exact
    // pretty-printed JSON encoding. If any `ir_core` type changes its serde
    // shape, the expected literal below must be regenerated.
    #[test]
    fn test_ir_core_manual_construction() {
        let mut const_pool = ConstPool::new();
        const_pool.insert(ConstantValue::String("hello".to_string()));

        let program = Program {
            const_pool,
            modules: vec![Module {
                name: "main".to_string(),
                functions: vec![Function {
                    id: FunctionId(10),
                    name: "entry".to_string(),
                    // `sig` is #[serde(skip)], so it does not appear in the JSON.
                    sig: {
                        let mut i = global_signature_interner().lock().unwrap();
                        i.intern(Signature { params: vec![], return_type: Type::Void })
                    },
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                    params: vec![],
                    return_type: Type::Void,
                    blocks: vec![Block {
                        id: 0,
                        instrs: vec![
                            Instr::from(InstrKind::PushConst(ConstId(0))),
                            Instr::from(InstrKind::Call(FunctionId(11), 0)),
                        ],
                        terminator: Terminator::Return,
                    }],
                    local_types: std::collections::HashMap::new(),
                }],
            }],
            field_offsets: std::collections::HashMap::new(),
            field_types: std::collections::HashMap::new(),
        };

        let json = serde_json::to_string_pretty(&program).unwrap();

        let expected = r#"{
  "const_pool": {
    "constants": [
      {
        "String": "hello"
      }
    ]
  },
  "modules": [
    {
      "name": "main",
      "functions": [
        {
          "id": 10,
          "name": "entry",
          "params": [],
          "return_type": "Void",
          "blocks": [
            {
              "id": 0,
              "instrs": [
                {
                  "kind": {
                    "PushConst": 0
                  },
                  "span": null
                },
                {
                  "kind": {
                    "Call": [
                      11,
                      0
                    ]
                  },
                  "span": null
                }
              ],
              "terminator": "Return"
            }
          ],
          "local_types": {},
          "param_slots": 0,
          "local_slots": 0,
          "return_slots": 0
        }
      ]
    }
  ],
  "field_offsets": {},
  "field_types": {}
}"#;
        assert_eq!(json, expected);
    }

    // Id newtypes are #[serde(transparent)]: they encode as bare integers.
    #[test]
    fn test_ir_core_ids() {
        assert_eq!(serde_json::to_string(&FunctionId(1)).unwrap(), "1");
        assert_eq!(serde_json::to_string(&ConstId(2)).unwrap(), "2");
        assert_eq!(serde_json::to_string(&TypeId(3)).unwrap(), "3");
    }
}
|
|
||||||
@ -1,9 +0,0 @@
|
|||||||
use super::function::Function;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// A module within a program, containing functions and other declarations.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Module {
    pub name: String,
    pub functions: Vec<Function>,
}
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
use super::const_pool::ConstPool;
|
|
||||||
use super::ids::FieldId;
|
|
||||||
use super::module::Module;
|
|
||||||
use super::types::Type;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// A complete compiled program: shared constant pool, modules, and field metadata.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Program {
    /// Deduplicated constants shared by all modules.
    pub const_pool: ConstPool,
    pub modules: Vec<Module>,
    // Field metadata; `default` keeps older serialized programs loadable.
    #[serde(default)]
    pub field_offsets: HashMap<FieldId, u32>,
    #[serde(default)]
    pub field_types: HashMap<FieldId, Type>,
}
|
|
||||||
@ -1,366 +0,0 @@
|
|||||||
use crate::ir_core::ids::SigId;
|
|
||||||
use crate::ir_core::types::Type;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::sync::{Mutex, OnceLock};
|
|
||||||
|
|
||||||
/// Canonical function signature: params + return type.
/// `Eq + Hash` so it can serve as the interner's map key.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Signature {
    pub params: Vec<Type>,
    pub return_type: Type,
}
|
|
||||||
|
|
||||||
impl Signature {
|
|
||||||
/// Stable, deterministic descriptor. Example:
|
|
||||||
/// fn(int;string)->void
|
|
||||||
/// fn(array{int;4};optional{string})->result{void;error{E}}
|
|
||||||
pub fn descriptor(&self) -> String {
|
|
||||||
let mut s = String::new();
|
|
||||||
s.push_str("fn(");
|
|
||||||
for (i, p) in self.params.iter().enumerate() {
|
|
||||||
if i > 0 {
|
|
||||||
s.push(';');
|
|
||||||
}
|
|
||||||
encode_type(p, &mut s);
|
|
||||||
}
|
|
||||||
s.push(')');
|
|
||||||
s.push_str("->");
|
|
||||||
encode_type(&self.return_type, &mut s);
|
|
||||||
s
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Parse a descriptor previously produced by `descriptor()`
|
|
||||||
pub fn from_descriptor(desc: &str) -> Result<Self, String> {
|
|
||||||
// Expect prefix: fn( ... )-> ...
|
|
||||||
if !desc.starts_with("fn(") {
|
|
||||||
return Err("Invalid descriptor: missing fn(".to_string());
|
|
||||||
}
|
|
||||||
let rest = &desc[3..];
|
|
||||||
let close = rest.find(')').ok_or_else(|| "Invalid descriptor: missing ')'".to_string())?;
|
|
||||||
let params_blob = &rest[..close];
|
|
||||||
let after = &rest[close + 1..];
|
|
||||||
let arrow = after.strip_prefix("->").ok_or_else(|| "Invalid descriptor: missing '->'".to_string())?;
|
|
||||||
|
|
||||||
let params = if params_blob.is_empty() {
|
|
||||||
Vec::new()
|
|
||||||
} else {
|
|
||||||
let mut v = Vec::new();
|
|
||||||
for part in split_top_level(params_blob, ';')? {
|
|
||||||
let (ty, consumed) = decode_type(part)?;
|
|
||||||
if consumed != part.len() {
|
|
||||||
return Err("Trailing garbage in parameter".to_string());
|
|
||||||
}
|
|
||||||
v.push(ty);
|
|
||||||
}
|
|
||||||
v
|
|
||||||
};
|
|
||||||
|
|
||||||
let (return_type, consumed) = decode_type(arrow)?;
|
|
||||||
if consumed != arrow.len() {
|
|
||||||
return Err("Trailing garbage after return type".to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Signature { params, return_type })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Global signature interner. Thread-safe and process-wide for this compiler instance.
pub struct SignatureInterner {
    /// Signature → id, for dedup lookups.
    map: HashMap<Signature, SigId>,
    /// Id → signature; a `SigId` indexes into this vector.
    rev: Vec<Signature>,
}
|
|
||||||
|
|
||||||
impl SignatureInterner {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Self { map: HashMap::new(), rev: Vec::new() }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn intern(&mut self, sig: Signature) -> SigId {
|
|
||||||
if let Some(id) = self.map.get(&sig) {
|
|
||||||
return *id;
|
|
||||||
}
|
|
||||||
let id = SigId(self.rev.len() as u32);
|
|
||||||
self.rev.push(sig.clone());
|
|
||||||
self.map.insert(sig, id);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn resolve(&self, id: SigId) -> Option<&Signature> {
|
|
||||||
self.rev.get(id.0 as usize)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static GLOBAL_INTERNER: OnceLock<Mutex<SignatureInterner>> = OnceLock::new();

/// Returns the process-wide signature interner, creating it on first use.
pub fn global_signature_interner() -> &'static Mutex<SignatureInterner> {
    GLOBAL_INTERNER.get_or_init(|| Mutex::new(SignatureInterner::new()))
}
|
|
||||||
|
|
||||||
// ==============
|
|
||||||
// Encoding/Decoding helpers for `Type`
|
|
||||||
// Canonical grammar (EBNF-ish):
|
|
||||||
// Type := "void" | "int" | "bounded" | "float" | "bool" | "string"
|
|
||||||
// | "struct{" Name "}" | "service{" Name "}" | "contract{" Name "}"
|
|
||||||
// | "error{" Name "}" | "array{" Type ";" UInt "}"
|
|
||||||
// | "optional{" Type "}" | "result{" Type ";" Type "}"
|
|
||||||
// | "fn(" [Type { ";" Type }] ")" "->" Type
|
|
||||||
// Name := escaped UTF-8 without '}' and ';' (escape via '\')
|
|
||||||
|
|
||||||
fn encode_type(ty: &Type, out: &mut String) {
|
|
||||||
match ty {
|
|
||||||
Type::Void => out.push_str("void"),
|
|
||||||
Type::Int => out.push_str("int"),
|
|
||||||
Type::Bounded => out.push_str("bounded"),
|
|
||||||
Type::Float => out.push_str("float"),
|
|
||||||
Type::Bool => out.push_str("bool"),
|
|
||||||
Type::String => out.push_str("string"),
|
|
||||||
Type::Optional(inner) => {
|
|
||||||
out.push_str("optional{");
|
|
||||||
encode_type(inner, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Result(ok, err) => {
|
|
||||||
out.push_str("result{");
|
|
||||||
encode_type(ok, out);
|
|
||||||
out.push(';');
|
|
||||||
encode_type(err, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Struct(name) => {
|
|
||||||
out.push_str("struct{");
|
|
||||||
encode_name(name, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Service(name) => {
|
|
||||||
out.push_str("service{");
|
|
||||||
encode_name(name, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Contract(name) => {
|
|
||||||
out.push_str("contract{");
|
|
||||||
encode_name(name, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::ErrorType(name) => {
|
|
||||||
out.push_str("error{");
|
|
||||||
encode_name(name, out);
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Array(inner, n) => {
|
|
||||||
out.push_str("array{");
|
|
||||||
encode_type(inner, out);
|
|
||||||
out.push(';');
|
|
||||||
out.push_str(&n.to_string());
|
|
||||||
out.push('}');
|
|
||||||
}
|
|
||||||
Type::Function { params, return_type } => {
|
|
||||||
out.push_str("fn(");
|
|
||||||
for (i, p) in params.iter().enumerate() {
|
|
||||||
if i > 0 { out.push(';'); }
|
|
||||||
encode_type(p, out);
|
|
||||||
}
|
|
||||||
out.push(')');
|
|
||||||
out.push_str("->");
|
|
||||||
encode_type(return_type, out);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Appends `name` to `out`, backslash-escaping the characters that carry
/// structural meaning in the descriptor grammar ('\\', '}', ';').
fn encode_name(name: &str, out: &mut String) {
    for ch in name.chars() {
        if matches!(ch, '\\' | '}' | ';') {
            out.push('\\');
        }
        out.push(ch);
    }
}
|
|
||||||
|
|
||||||
/// Decodes an escaped name, stopping at the first unescaped '}'.
///
/// Returns the decoded name and the number of BYTES consumed from `s`;
/// callers slice `s` by this count, so it must be a byte offset.
///
/// # Errors
/// Fails if the input ends in the middle of a backslash escape.
fn decode_name(s: &str) -> Result<(String, usize), String> {
    let mut out = String::new();
    let mut chars = s.chars().peekable();
    let mut consumed = 0;
    while let Some(&c) = chars.peek() {
        if c == '}' {
            break;
        }
        // BUGFIX: count bytes, not characters. The old code added 1 per char,
        // which desynchronized the callers' byte slicing (`&s[n + used..]`)
        // for any multi-byte UTF-8 name — the grammar explicitly allows
        // escaped UTF-8 names.
        consumed += c.len_utf8();
        chars.next();
        if c == '\\' {
            let next = chars.next().ok_or_else(|| "Invalid escape in name".to_string())?;
            consumed += next.len_utf8();
            out.push(next);
        } else {
            out.push(c);
        }
    }
    Ok((out, consumed))
}
|
|
||||||
|
|
||||||
/// Splits `input` on `sep`, but only at nesting depth zero with respect to
/// `{}` pairs and `()` pairs (nested fn types), and never inside a
/// backslash escape.
///
/// # Errors
/// Fails if braces/parens are unbalanced at the end of the input.
fn split_top_level(input: &str, sep: char) -> Result<Vec<&str>, String> {
    let mut parts = Vec::new();
    let mut depth_brace = 0i32;
    let mut depth_fn = 0i32; // counts '(' nesting for nested fn types
    let mut start = 0usize;
    let mut escaped = false;
    for (i, ch) in input.char_indices() {
        // BUGFIX: honor backslash escapes. Encoded names may contain
        // `\}` / `\;` (see `encode_name`); the old code counted an escaped
        // '}' against brace depth and mis-split on an escaped separator.
        if escaped {
            escaped = false;
            continue;
        }
        match ch {
            '\\' => {
                escaped = true;
                continue;
            }
            '{' => depth_brace += 1,
            '}' => depth_brace -= 1,
            '(' => depth_fn += 1,
            ')' => depth_fn -= 1,
            _ => {}
        }
        if ch == sep && depth_brace == 0 && depth_fn == 0 {
            parts.push(&input[start..i]);
            start = i + ch.len_utf8();
        }
    }
    parts.push(&input[start..]);
    if depth_brace != 0 || depth_fn != 0 {
        return Err("Unbalanced delimiters".to_string());
    }
    Ok(parts)
}
|
|
||||||
|
|
||||||
fn decode_type(s: &str) -> Result<(Type, usize), String> {
|
|
||||||
// Order matters; check longer keywords first to avoid prefix issues
|
|
||||||
let keywords = [
|
|
||||||
"optional{",
|
|
||||||
"result{",
|
|
||||||
"struct{",
|
|
||||||
"service{",
|
|
||||||
"contract{",
|
|
||||||
"error{",
|
|
||||||
"array{",
|
|
||||||
"fn(",
|
|
||||||
"void",
|
|
||||||
"int",
|
|
||||||
"bounded",
|
|
||||||
"float",
|
|
||||||
"bool",
|
|
||||||
"string",
|
|
||||||
];
|
|
||||||
|
|
||||||
for kw in keywords {
|
|
||||||
if s.starts_with(kw) {
|
|
||||||
match kw {
|
|
||||||
"void" => return Ok((Type::Void, 4)),
|
|
||||||
"int" => return Ok((Type::Int, 3)),
|
|
||||||
"bounded" => return Ok((Type::Bounded, 7)),
|
|
||||||
"float" => return Ok((Type::Float, 5)),
|
|
||||||
"bool" => return Ok((Type::Bool, 4)),
|
|
||||||
"string" => return Ok((Type::String, 6)),
|
|
||||||
"optional{" => {
|
|
||||||
let (inner, used) = decode_type(&s[9..])?;
|
|
||||||
let rest = &s[9 + used..];
|
|
||||||
if !rest.starts_with('}') { return Err("Missing '}' for optional".to_string()); }
|
|
||||||
return Ok((Type::Optional(Box::new(inner)), 9 + used + 1));
|
|
||||||
}
|
|
||||||
"result{" => {
|
|
||||||
let (ok, used_ok) = decode_type(&s[7..])?;
|
|
||||||
let rest = &s[7 + used_ok..];
|
|
||||||
if !rest.starts_with(';') { return Err("Missing ';' in result".to_string()); }
|
|
||||||
let (err, used_err) = decode_type(&rest[1..])?;
|
|
||||||
let rest2 = &rest[1 + used_err..];
|
|
||||||
if !rest2.starts_with('}') { return Err("Missing '}' for result".to_string()); }
|
|
||||||
return Ok((Type::Result(Box::new(ok), Box::new(err)), 7 + used_ok + 1 + used_err + 1));
|
|
||||||
}
|
|
||||||
"struct{" => {
|
|
||||||
let (name, used) = decode_name(&s[7..])?;
|
|
||||||
let rest = &s[7 + used..];
|
|
||||||
if !rest.starts_with('}') { return Err("Missing '}' for struct".to_string()); }
|
|
||||||
return Ok((Type::Struct(name), 7 + used + 1));
|
|
||||||
}
|
|
||||||
"service{" => {
|
|
||||||
let (name, used) = decode_name(&s[8..])?;
|
|
||||||
let rest = &s[8 + used..];
|
|
||||||
if !rest.starts_with('}') { return Err("Missing '}' for service".to_string()); }
|
|
||||||
return Ok((Type::Service(name), 8 + used + 1));
|
|
||||||
}
|
|
||||||
"contract{" => {
|
|
||||||
let (name, used) = decode_name(&s[9..])?;
|
|
||||||
let rest = &s[9 + used..];
|
|
||||||
if !rest.starts_with('}') { return Err("Missing '}' for contract".to_string()); }
|
|
||||||
return Ok((Type::Contract(name), 9 + used + 1));
|
|
||||||
}
|
|
||||||
"error{" => {
|
|
||||||
let (name, used) = decode_name(&s[6..])?;
|
|
||||||
let rest = &s[6 + used..];
|
|
||||||
if !rest.starts_with('}') { return Err("Missing '}' for error".to_string()); }
|
|
||||||
return Ok((Type::ErrorType(name), 6 + used + 1));
|
|
||||||
}
|
|
||||||
"array{" => {
|
|
||||||
let (inner, used) = decode_type(&s[6..])?;
|
|
||||||
let rest = &s[6 + used..];
|
|
||||||
if !rest.starts_with(';') { return Err("Missing ';' in array".to_string()); }
|
|
||||||
// parse UInt
|
|
||||||
let rest_num = &rest[1..];
|
|
||||||
let mut n_str = String::new();
|
|
||||||
let mut consumed = 0usize;
|
|
||||||
for ch in rest_num.chars() {
|
|
||||||
if ch.is_ascii_digit() { n_str.push(ch); consumed += 1; } else { break; }
|
|
||||||
}
|
|
||||||
if n_str.is_empty() { return Err("Missing array size".to_string()); }
|
|
||||||
let n: u32 = n_str.parse().map_err(|_| "Invalid array size".to_string())?;
|
|
||||||
let rest2 = &rest_num[consumed..];
|
|
||||||
if !rest2.starts_with('}') { return Err("Missing '}' for array".to_string()); }
|
|
||||||
return Ok((Type::Array(Box::new(inner), n), 6 + used + 1 + consumed + 1));
|
|
||||||
}
|
|
||||||
"fn(" => {
|
|
||||||
// parse params until ')'
|
|
||||||
let after = &s[3..];
|
|
||||||
let close = after.find(')').ok_or_else(|| "Missing ')' in fn type".to_string())?;
|
|
||||||
let params_blob = &after[..close];
|
|
||||||
let mut params = Vec::new();
|
|
||||||
if !params_blob.is_empty() {
|
|
||||||
for part in split_top_level(params_blob, ';')? {
|
|
||||||
let (p, used) = decode_type(part)?;
|
|
||||||
if used != part.len() { return Err("Trailing data in fn param".to_string()); }
|
|
||||||
params.push(p);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let rest = &after[close + 1..];
|
|
||||||
if !rest.starts_with("->") { return Err("Missing '->' in fn type".to_string()); }
|
|
||||||
let (ret, used_ret) = decode_type(&rest[2..])?;
|
|
||||||
return Ok((Type::Function { params, return_type: Box::new(ret) }, 3 + close + 1 + 2 + used_ret));
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err("Unknown type in descriptor".to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    // Overloads are distinguished purely by their descriptor strings.
    #[test]
    fn descriptors_are_different_for_overloads() {
        let sig_i = Signature {
            params: vec![Type::Int],
            return_type: Type::Void,
        };
        let sig_s = Signature {
            params: vec![Type::String],
            return_type: Type::Void,
        };
        assert_ne!(sig_i.descriptor(), sig_s.descriptor());
    }

    // encode -> decode -> encode must be a fixed point, and decoding must
    // reproduce the original signature exactly.
    #[test]
    fn descriptor_round_trip_stable() {
        let sig = Signature {
            params: vec![
                Type::Array(Box::new(Type::Int), 4),
                Type::Optional(Box::new(Type::String)),
                Type::Result(Box::new(Type::Int), Box::new(Type::ErrorType("E42".into()))),
            ],
            return_type: Type::Void,
        };
        let d1 = sig.descriptor();
        let parsed = Signature::from_descriptor(&d1).expect("parse ok");
        let d2 = parsed.descriptor();
        assert_eq!(d1, d2);
        assert_eq!(sig, parsed);
    }
}
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Terminators that end a basic block and handle control flow.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum Terminator {
    /// Returns from the current function.
    Return,
    /// Unconditional jump to another block (by index/ID).
    Jump(u32),
    /// Conditional branch with two successor blocks. The name suggests the
    /// condition is popped and `target` is taken when it is false, with
    /// `else_target` taken otherwise — TODO(review): confirm against the VM's
    /// branch semantics; the HIP validator treats both successors
    /// symmetrically and does not depend on which edge is which.
    JumpIfFalse {
        target: u32,
        else_target: u32,
    },
}
|
|
||||||
@ -1,53 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
/// The surface-level type vocabulary used by signatures and descriptors.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Type {
    /// Absence of a value (e.g. a function with no return value).
    Void,
    Int,
    /// Bounded integer type — presumably the 0..0xFFFF range documented for
    /// `PushBounded` elsewhere in this crate; TODO confirm.
    Bounded,
    Float,
    Bool,
    String,
    /// A value that may be absent; wraps the inner type.
    Optional(Box<Type>),
    /// Success/failure pair: `Result(ok, err)`.
    Result(Box<Type>, Box<Type>),
    /// Named user-defined struct type.
    Struct(String),
    /// Named service type.
    Service(String),
    /// Named contract type.
    Contract(String),
    /// Named error type.
    ErrorType(String),
    /// Fixed-size array: element type plus element count.
    Array(Box<Type>, u32),
    /// Function type: parameter types and a return type.
    Function {
        params: Vec<Type>,
        return_type: Box<Type>,
    },
}
|
|
||||||
|
|
||||||
impl fmt::Display for Type {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Type::Void => write!(f, "void"),
|
|
||||||
Type::Int => write!(f, "int"),
|
|
||||||
Type::Bounded => write!(f, "bounded"),
|
|
||||||
Type::Float => write!(f, "float"),
|
|
||||||
Type::Bool => write!(f, "bool"),
|
|
||||||
Type::String => write!(f, "string"),
|
|
||||||
Type::Optional(inner) => write!(f, "optional<{}>", inner),
|
|
||||||
Type::Result(ok, err) => write!(f, "result<{}, {}>", ok, err),
|
|
||||||
Type::Struct(name) => write!(f, "{}", name),
|
|
||||||
Type::Service(name) => write!(f, "{}", name),
|
|
||||||
Type::Contract(name) => write!(f, "{}", name),
|
|
||||||
Type::ErrorType(name) => write!(f, "{}", name),
|
|
||||||
Type::Array(inner, size) => write!(f, "array<{}>[{}]", inner, size),
|
|
||||||
Type::Function { params, return_type } => {
|
|
||||||
write!(f, "fn(")?;
|
|
||||||
for (i, param) in params.iter().enumerate() {
|
|
||||||
if i > 0 {
|
|
||||||
write!(f, ", ")?;
|
|
||||||
}
|
|
||||||
write!(f, "{}", param)?;
|
|
||||||
}
|
|
||||||
write!(f, ") -> {}", return_type)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,362 +0,0 @@
|
|||||||
use super::ids::ValueId;
|
|
||||||
use super::instr::InstrKind;
|
|
||||||
use super::program::Program;
|
|
||||||
use super::terminator::Terminator;
|
|
||||||
use std::collections::{HashMap, VecDeque};
|
|
||||||
|
|
||||||
/// The kind of HIP (heap access) operation opened by a `Begin*` instruction.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum HipOpKind {
    /// Read-only access opened by `BeginPeek`.
    Peek,
    /// Borrowed access opened by `BeginBorrow`.
    Borrow,
    /// Mutable access opened by `BeginMutate`; the innermost op must be
    /// `Mutate` for `GateStore*` instructions to be legal.
    Mutate,
}
|
|
||||||
|
|
||||||
/// One open HIP operation on the validator's tracking stack: the kind of
/// access and the gate value that opened it.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct HipOp {
    /// Access kind (Peek / Borrow / Mutate).
    pub kind: HipOpKind,
    /// The gate value the `Begin*` instruction was given.
    pub gate: ValueId,
}
|
|
||||||
|
|
||||||
pub fn validate_program(program: &Program) -> Result<(), String> {
|
|
||||||
for module in &program.modules {
|
|
||||||
for func in &module.functions {
|
|
||||||
validate_function(func)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn validate_function(func: &super::function::Function) -> Result<(), String> {
|
|
||||||
let mut block_entry_stacks: HashMap<u32, Vec<HipOp>> = HashMap::new();
|
|
||||||
let mut worklist: VecDeque<u32> = VecDeque::new();
|
|
||||||
|
|
||||||
if func.blocks.is_empty() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Assume the first block is the entry block (usually ID 0)
|
|
||||||
let entry_block_id = func.blocks[0].id;
|
|
||||||
block_entry_stacks.insert(entry_block_id, Vec::new());
|
|
||||||
worklist.push_back(entry_block_id);
|
|
||||||
|
|
||||||
let blocks_by_id: HashMap<u32, &super::block::Block> = func.blocks.iter().map(|b| (b.id, b)).collect();
|
|
||||||
let mut visited_with_stack: HashMap<u32, Vec<HipOp>> = HashMap::new();
|
|
||||||
|
|
||||||
while let Some(block_id) = worklist.pop_front() {
|
|
||||||
let block = blocks_by_id.get(&block_id).ok_or_else(|| format!("Invalid block ID: {}", block_id))?;
|
|
||||||
let mut current_stack = block_entry_stacks.get(&block_id).unwrap().clone();
|
|
||||||
|
|
||||||
// If we've already visited this block with the same stack, skip it to avoid infinite loops
|
|
||||||
if let Some(prev_stack) = visited_with_stack.get(&block_id) {
|
|
||||||
if prev_stack == ¤t_stack {
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
return Err(format!("Block {} reached with inconsistent HIP stacks: {:?} vs {:?}", block_id, prev_stack, current_stack));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
visited_with_stack.insert(block_id, current_stack.clone());
|
|
||||||
|
|
||||||
for instr in &block.instrs {
|
|
||||||
match &instr.kind {
|
|
||||||
InstrKind::BeginPeek { gate } => {
|
|
||||||
current_stack.push(HipOp { kind: HipOpKind::Peek, gate: *gate });
|
|
||||||
}
|
|
||||||
InstrKind::BeginBorrow { gate } => {
|
|
||||||
current_stack.push(HipOp { kind: HipOpKind::Borrow, gate: *gate });
|
|
||||||
}
|
|
||||||
InstrKind::BeginMutate { gate } => {
|
|
||||||
current_stack.push(HipOp { kind: HipOpKind::Mutate, gate: *gate });
|
|
||||||
}
|
|
||||||
InstrKind::EndPeek => {
|
|
||||||
match current_stack.pop() {
|
|
||||||
Some(op) if op.kind == HipOpKind::Peek => {},
|
|
||||||
Some(op) => return Err(format!("EndPeek doesn't match current HIP op: {:?}", op)),
|
|
||||||
None => return Err("EndPeek without matching BeginPeek".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::EndBorrow => {
|
|
||||||
match current_stack.pop() {
|
|
||||||
Some(op) if op.kind == HipOpKind::Borrow => {},
|
|
||||||
Some(op) => return Err(format!("EndBorrow doesn't match current HIP op: {:?}", op)),
|
|
||||||
None => return Err("EndBorrow without matching BeginBorrow".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::EndMutate => {
|
|
||||||
match current_stack.pop() {
|
|
||||||
Some(op) if op.kind == HipOpKind::Mutate => {},
|
|
||||||
Some(op) => return Err(format!("EndMutate doesn't match current HIP op: {:?}", op)),
|
|
||||||
None => return Err("EndMutate without matching BeginMutate".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::GateLoadField { .. } | InstrKind::GateLoadIndex { .. } => {
|
|
||||||
if current_stack.is_empty() {
|
|
||||||
return Err("GateLoad outside of HIP operation".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::GateStoreField { .. } | InstrKind::GateStoreIndex { .. } => {
|
|
||||||
match current_stack.last() {
|
|
||||||
Some(op) if op.kind == HipOpKind::Mutate => {},
|
|
||||||
_ => return Err("GateStore outside of BeginMutate".to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::Call(id, _) => {
|
|
||||||
if id.0 == 0 {
|
|
||||||
return Err("Call to FunctionId(0)".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
InstrKind::Alloc { ty, .. } => {
|
|
||||||
if ty.0 == 0 {
|
|
||||||
return Err("Alloc with TypeId(0)".to_string());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match &block.terminator {
|
|
||||||
Terminator::Return => {
|
|
||||||
if !current_stack.is_empty() {
|
|
||||||
return Err(format!("Function returns with non-empty HIP stack: {:?}", current_stack));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Terminator::Jump(target) => {
|
|
||||||
propagate_stack(&mut block_entry_stacks, &mut worklist, *target, ¤t_stack)?;
|
|
||||||
}
|
|
||||||
Terminator::JumpIfFalse { target, else_target } => {
|
|
||||||
propagate_stack(&mut block_entry_stacks, &mut worklist, *target, ¤t_stack)?;
|
|
||||||
propagate_stack(&mut block_entry_stacks, &mut worklist, *else_target, ¤t_stack)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn propagate_stack(
|
|
||||||
entry_stacks: &mut HashMap<u32, Vec<HipOp>>,
|
|
||||||
worklist: &mut VecDeque<u32>,
|
|
||||||
target: u32,
|
|
||||||
stack: &Vec<HipOp>
|
|
||||||
) -> Result<(), String> {
|
|
||||||
if let Some(existing) = entry_stacks.get(&target) {
|
|
||||||
if existing != stack {
|
|
||||||
return Err(format!("Control flow merge at block {} with inconsistent HIP stacks: {:?} vs {:?}", target, existing, stack));
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
entry_stacks.insert(target, stack.clone());
|
|
||||||
worklist.push_back(target);
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir_core::*;

    /// Builds a minimal void -> void function wrapping the given blocks.
    fn create_dummy_function(blocks: Vec<Block>) -> Function {
        Function {
            id: FunctionId(1),
            name: "test".to_string(),
            sig: {
                // Intern an empty signature so the function has a valid SigId.
                let mut i = global_signature_interner().lock().unwrap();
                i.intern(Signature { params: vec![], return_type: Type::Void })
            },
            param_slots: 0,
            local_slots: 0,
            return_slots: 0,
            params: vec![],
            return_type: Type::Void,
            blocks,
            local_types: HashMap::new(),
        }
    }

    /// Wraps a single function in a one-module program for `validate_program`.
    fn create_dummy_program(func: Function) -> Program {
        Program {
            const_pool: ConstPool::new(),
            modules: vec![Module {
                name: "test".to_string(),
                functions: vec![func],
            }],
            field_offsets: HashMap::new(),
            field_types: HashMap::new(),
        }
    }

    /// Properly nested Peek/Mutate pairs (with matching gate ops) validate.
    #[test]
    fn test_valid_hip_nesting() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
                Instr::from(InstrKind::GateLoadField { gate: ValueId(0), field: FieldId(0) }),
                Instr::from(InstrKind::BeginMutate { gate: ValueId(1) }),
                Instr::from(InstrKind::GateStoreField { gate: ValueId(1), field: FieldId(0), value: ValueId(2) }),
                Instr::from(InstrKind::EndMutate),
                Instr::from(InstrKind::EndPeek),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        assert!(validate_program(&prog).is_ok());
    }

    /// Returning with an open BeginPeek (no EndPeek) is rejected.
    #[test]
    fn test_invalid_hip_unbalanced() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        let res = validate_program(&prog);
        assert!(res.is_err());
        assert!(res.unwrap_err().contains("non-empty HIP stack"));
    }

    /// An End* that doesn't match the innermost Begin* is rejected.
    #[test]
    fn test_invalid_hip_wrong_end() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
                Instr::from(InstrKind::EndMutate),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        let res = validate_program(&prog);
        assert!(res.is_err());
        assert!(res.unwrap_err().contains("EndMutate doesn't match"));
    }

    /// A store inside a Borrow (not a Mutate) is rejected.
    #[test]
    fn test_invalid_store_outside_mutate() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginBorrow { gate: ValueId(0) }),
                Instr::from(InstrKind::GateStoreField { gate: ValueId(0), field: FieldId(0), value: ValueId(1) }),
                Instr::from(InstrKind::EndBorrow),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        let res = validate_program(&prog);
        assert!(res.is_err());
        assert!(res.unwrap_err().contains("GateStore outside of BeginMutate"));
    }

    /// A store inside a Mutate is accepted.
    #[test]
    fn test_valid_store_in_mutate() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginMutate { gate: ValueId(0) }),
                Instr::from(InstrKind::GateStoreField { gate: ValueId(0), field: FieldId(0), value: ValueId(1) }),
                Instr::from(InstrKind::EndMutate),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        assert!(validate_program(&prog).is_ok());
    }

    /// A load with no HIP operation open at all is rejected.
    #[test]
    fn test_invalid_load_outside_hip() {
        let block = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::GateLoadField { gate: ValueId(0), field: FieldId(0) }),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block]));
        let res = validate_program(&prog);
        assert!(res.is_err());
        assert!(res.unwrap_err().contains("GateLoad outside of HIP operation"));
    }

    /// A HIP operation may span blocks: Begin in block 0, End in block 1.
    #[test]
    fn test_valid_hip_across_blocks() {
        let block0 = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
            ],
            terminator: Terminator::Jump(1),
        };
        let block1 = Block {
            id: 1,
            instrs: vec![
                Instr::from(InstrKind::GateLoadField { gate: ValueId(0), field: FieldId(0) }),
                Instr::from(InstrKind::EndPeek),
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block0, block1]));
        assert!(validate_program(&prog).is_ok());
    }

    /// A merge point reached with different HIP stacks (one path opened a
    /// Peek, the other did not) is rejected at the merge block.
    #[test]
    fn test_invalid_hip_across_blocks_inconsistent() {
        let block0 = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::PushConst(ConstId(0))), // cond
            ],
            terminator: Terminator::JumpIfFalse { target: 2, else_target: 1 },
        };
        let block1 = Block {
            id: 1,
            instrs: vec![
                Instr::from(InstrKind::BeginPeek { gate: ValueId(0) }),
            ],
            terminator: Terminator::Jump(3),
        };
        let block2 = Block {
            id: 2,
            instrs: vec![
                // No BeginPeek here
            ],
            terminator: Terminator::Jump(3),
        };
        let block3 = Block {
            id: 3,
            instrs: vec![
                Instr::from(InstrKind::EndPeek), // ERROR: block 2 reaches here with empty stack
            ],
            terminator: Terminator::Return,
        };
        let prog = create_dummy_program(create_dummy_function(vec![block0, block1, block2, block3]));
        let res = validate_program(&prog);
        assert!(res.is_err());
        assert!(res.unwrap_err().contains("Control flow merge at block 3"));
    }

    /// The reserved 0 ids (FunctionId(0), TypeId(0)) are rejected.
    #[test]
    fn test_silent_fallback_checks() {
        let block_func0 = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::Call(FunctionId(0), 0)),
            ],
            terminator: Terminator::Return,
        };
        let prog_func0 = create_dummy_program(create_dummy_function(vec![block_func0]));
        assert!(validate_program(&prog_func0).is_err());

        let block_ty0 = Block {
            id: 0,
            instrs: vec![
                Instr::from(InstrKind::Alloc { ty: TypeId(0), slots: 1 }),
            ],
            terminator: Terminator::Return,
        };
        let prog_ty0 = create_dummy_program(create_dummy_function(vec![block_ty0]));
        assert!(validate_program(&prog_ty0).is_err());
    }
}
|
|
||||||
@ -1,418 +0,0 @@
|
|||||||
//! # IR Instructions
|
|
||||||
//!
|
|
||||||
//! This module defines the set of instructions used in the Intermediate Representation (IR).
|
|
||||||
//! These instructions are designed to be easy to generate from a high-level AST and
|
|
||||||
//! easy to lower into VM-specific bytecode.
|
|
||||||
|
|
||||||
use crate::common::spans::Span;
|
|
||||||
use crate::ir_core::ids::{FunctionId, SigId};
|
|
||||||
use crate::ir_lang::types::{ConstId, TypeId};
|
|
||||||
|
|
||||||
/// An `Instruction` combines an instruction's behavior (`kind`) with its
/// source code location (`span`) for debugging and error reporting.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct Instruction {
    /// The operation this instruction performs.
    pub kind: InstrKind,
    /// The location in the original source code that generated this
    /// instruction; `None` when no source location is available.
    pub span: Option<Span>,
}
|
|
||||||
|
|
||||||
impl Instruction {
|
|
||||||
/// Creates a new instruction with an optional source span.
|
|
||||||
pub fn new(kind: InstrKind, span: Option<Span>) -> Self {
|
|
||||||
Self { kind, span }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A `Label` represents a named destination for a jump instruction.
/// During the assembly phase, labels are resolved into actual memory offsets.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct Label(pub String);
|
|
||||||
|
|
||||||
/// The various types of operations that can be performed in the IR.
///
/// The IR uses a stack-based model, similar to the final Prometeu ByteCode.
/// The variant set and its serialized shape are locked by
/// `test_isa_surface_snapshot` below; any change here requires an explicit
/// ISA review.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum InstrKind {
    /// Does nothing.
    Nop,
    /// Terminates program execution.
    Halt,

    // --- Literals ---
    // These instructions push a constant value from the pool onto the stack.

    /// Pushes a constant from the pool onto the stack.
    PushConst(ConstId),
    /// Pushes a bounded value (0..0xFFFF) onto the stack.
    PushBounded(u32),
    /// Pushes a boolean onto the stack.
    PushBool(bool),
    /// Pushes a `null` value onto the stack.
    PushNull,

    // --- Stack Operations ---

    /// Removes the top value from the stack. RC-sensitive: see `RC_SENSITIVE_OPS`.
    Pop,
    /// Duplicates the top value on the stack.
    Dup,
    /// Swaps the top two values on the stack.
    Swap,

    // --- Arithmetic ---
    // These take two values from the stack and push the result.

    /// Addition: `a + b`
    Add,
    /// Subtraction: `a - b`
    Sub,
    /// Multiplication: `a * b`
    Mul,
    /// Division: `a / b`
    Div,
    /// Negation: `-a` (takes one value)
    Neg,

    // --- Logical/Comparison ---

    /// Equality: `a == b`
    Eq,
    /// Inequality: `a != b`
    Neq,
    /// Less than: `a < b`
    Lt,
    /// Greater than: `a > b`
    Gt,
    /// Less than or equal: `a <= b`
    Lte,
    /// Greater than or equal: `a >= b`
    Gte,
    /// Logical AND: `a && b`
    And,
    /// Logical OR: `a || b`
    Or,
    /// Logical NOT: `!a`
    Not,

    // --- Bitwise Operations ---

    /// Bitwise AND: `a & b`
    BitAnd,
    /// Bitwise OR: `a | b`
    BitOr,
    /// Bitwise XOR: `a ^ b`
    BitXor,
    /// Shift Left: `a << b`
    Shl,
    /// Shift Right: `a >> b`
    Shr,

    // --- Variable Access ---

    /// Retrieves a value from a local variable slot and pushes it onto the stack.
    LocalLoad { slot: u32 },
    /// Pops a value from the stack and stores it in a local variable slot.
    /// RC-sensitive: see `RC_SENSITIVE_OPS`.
    LocalStore { slot: u32 },
    /// Retrieves a value from a global variable slot and pushes it onto the stack.
    GetGlobal(u32),
    /// Pops a value from the stack and stores it in a global variable slot.
    SetGlobal(u32),

    // --- Control Flow ---

    /// Unconditionally jumps to the specified label.
    Jmp(Label),
    /// Pops a boolean from the stack. If false, jumps to the specified label.
    JmpIfFalse(Label),
    /// Defines a location that can be jumped to. Does not emit code by itself.
    Label(Label),
    /// Calls a function by ID with the specified number of arguments.
    /// Arguments should be pushed onto the stack before calling.
    Call { func_id: FunctionId, arg_count: u32 },
    /// Calls a function from another project.
    ImportCall {
        dep_alias: String,
        module_path: String,
        /// Optional service/type owner for methods (e.g., "Log"). `None` for free functions.
        owner: Option<String>,
        /// Unqualified function/method name (e.g., "debug").
        base_name: String,
        /// Exact signature id selected by the frontend.
        sig: SigId,
        arg_count: u32,
    },
    /// Returns from the current function. The return value (if any) should be
    /// on top of the stack. RC-sensitive: see `RC_SENSITIVE_OPS`.
    Ret,

    // --- OS / System ---

    /// Triggers a system call (e.g., drawing to the screen, reading input).
    Syscall(u32),
    /// Special instruction to synchronize with the hardware frame clock.
    /// RC-sensitive safe point: see `RC_SENSITIVE_OPS`.
    FrameSync,

    // --- HIP / Memory ---

    /// Allocates memory on the heap.
    Alloc { type_id: TypeId, slots: u32 },
    /// Reads from heap at gate + offset. Pops gate, pushes value.
    GateLoad { offset: u32 },
    /// Writes to heap at gate + offset. Pops gate and value.
    GateStore { offset: u32 },

    // --- Scope Markers ---
    // Delimit HIP access scopes. "Gate" (never "Ref") is the only HIP pointer
    // terminology in ir_lang — enforced by `test_no_ref_leakage_in_instr_names`.
    GateBeginPeek,
    GateEndPeek,
    GateBeginBorrow,
    GateEndBorrow,
    GateBeginMutate,
    GateEndMutate,

    // --- Reference Counting ---

    /// Increments the reference count of a gate handle on the stack.
    /// Stack: [..., Gate(g)] -> [..., Gate(g)]
    GateRetain,
    /// Decrements the reference count of a gate handle and pops it from the stack.
    /// Stack: [..., Gate(g)] -> [...]
    GateRelease,
}
|
|
||||||
|
|
||||||
/// List of instructions that are sensitive to Reference Counting (RC).
/// These instructions must trigger retain/release operations on gate handles.
/// Kept as a plain name list (exercised by `test_rc_sensitive_list_exists`
/// below) so the RC surface can be asserted without matching on `InstrKind`.
pub const RC_SENSITIVE_OPS: &[&str] = &[
    "LocalStore",
    "GateStore",
    "GateLoad",
    "Pop",
    "Ret",
    "FrameSync",
];
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir_lang::types::{ConstId, TypeId};

    /// Struct-variant fields must survive `clone()` intact.
    #[test]
    fn test_instr_kind_is_cloneable() {
        let instr = InstrKind::Alloc { type_id: TypeId(1), slots: 2 };
        let cloned = instr.clone();
        match cloned {
            InstrKind::Alloc { type_id, slots } => {
                assert_eq!(type_id, TypeId(1));
                assert_eq!(slots, 2);
            }
            _ => panic!("Clone failed"),
        }
    }

    #[test]
    fn test_isa_surface_snapshot() {
        // This test ensures that the instruction set surface remains stable.
        // If you add/remove/change instructions, this test will fail,
        // prompting an explicit review of the ISA change.
        let instructions = vec![
            InstrKind::Nop,
            InstrKind::Halt,
            InstrKind::PushConst(ConstId(0)),
            InstrKind::PushBounded(0),
            InstrKind::PushBool(true),
            InstrKind::PushNull,
            InstrKind::Pop,
            InstrKind::Dup,
            InstrKind::Swap,
            InstrKind::Add,
            InstrKind::Sub,
            InstrKind::Mul,
            InstrKind::Div,
            InstrKind::Neg,
            InstrKind::Eq,
            InstrKind::Neq,
            InstrKind::Lt,
            InstrKind::Gt,
            InstrKind::Lte,
            InstrKind::Gte,
            InstrKind::And,
            InstrKind::Or,
            InstrKind::Not,
            InstrKind::BitAnd,
            InstrKind::BitOr,
            InstrKind::BitXor,
            InstrKind::Shl,
            InstrKind::Shr,
            InstrKind::LocalLoad { slot: 0 },
            InstrKind::LocalStore { slot: 0 },
            InstrKind::GetGlobal(0),
            InstrKind::SetGlobal(0),
            InstrKind::Jmp(Label("target".to_string())),
            InstrKind::JmpIfFalse(Label("target".to_string())),
            InstrKind::Label(Label("target".to_string())),
            InstrKind::Call { func_id: FunctionId(0), arg_count: 0 },
            InstrKind::ImportCall {
                dep_alias: "std".to_string(),
                module_path: "math".to_string(),
                owner: None,
                base_name: "abs".to_string(),
                sig: SigId(1),
                arg_count: 1,
            },
            InstrKind::Ret,
            InstrKind::Syscall(0),
            InstrKind::FrameSync,
            InstrKind::Alloc { type_id: TypeId(0), slots: 0 },
            InstrKind::GateLoad { offset: 0 },
            InstrKind::GateStore { offset: 0 },
            InstrKind::GateBeginPeek,
            InstrKind::GateEndPeek,
            InstrKind::GateBeginBorrow,
            InstrKind::GateEndBorrow,
            InstrKind::GateBeginMutate,
            InstrKind::GateEndMutate,
            InstrKind::GateRetain,
            InstrKind::GateRelease,
        ];

        let serialized = serde_json::to_string_pretty(&instructions).unwrap();

        // This is a "lock" on the ISA surface.
        // If the structure of InstrKind changes, the serialization will change.
        // The literal below is serde_json's 2-space pretty output, byte-exact.
        let expected_json = r#"[
  "Nop",
  "Halt",
  {
    "PushConst": 0
  },
  {
    "PushBounded": 0
  },
  {
    "PushBool": true
  },
  "PushNull",
  "Pop",
  "Dup",
  "Swap",
  "Add",
  "Sub",
  "Mul",
  "Div",
  "Neg",
  "Eq",
  "Neq",
  "Lt",
  "Gt",
  "Lte",
  "Gte",
  "And",
  "Or",
  "Not",
  "BitAnd",
  "BitOr",
  "BitXor",
  "Shl",
  "Shr",
  {
    "LocalLoad": {
      "slot": 0
    }
  },
  {
    "LocalStore": {
      "slot": 0
    }
  },
  {
    "GetGlobal": 0
  },
  {
    "SetGlobal": 0
  },
  {
    "Jmp": "target"
  },
  {
    "JmpIfFalse": "target"
  },
  {
    "Label": "target"
  },
  {
    "Call": {
      "func_id": 0,
      "arg_count": 0
    }
  },
  {
    "ImportCall": {
      "dep_alias": "std",
      "module_path": "math",
      "owner": null,
      "base_name": "abs",
      "sig": 1,
      "arg_count": 1
    }
  },
  "Ret",
  {
    "Syscall": 0
  },
  "FrameSync",
  {
    "Alloc": {
      "type_id": 0,
      "slots": 0
    }
  },
  {
    "GateLoad": {
      "offset": 0
    }
  },
  {
    "GateStore": {
      "offset": 0
    }
  },
  "GateBeginPeek",
  "GateEndPeek",
  "GateBeginBorrow",
  "GateEndBorrow",
  "GateBeginMutate",
  "GateEndMutate",
  "GateRetain",
  "GateRelease"
]"#;
        assert_eq!(serialized, expected_json);
    }

    #[test]
    fn test_no_ref_leakage_in_instr_names() {
        // Enforce the rule that "Ref" must never refer to HIP memory in ir_lang.
        // The snapshot test above already locks the names, but this test
        // explicitly asserts the absence of the "Ref" substring in HIP-related instructions.
        let instructions = [
            "GateLoad", "GateStore", "Alloc",
            "GateBeginPeek", "GateEndPeek",
            "GateBeginBorrow", "GateEndBorrow",
            "GateBeginMutate", "GateEndMutate",
            "GateRetain", "GateRelease"
        ];

        for name in instructions {
            assert!(!name.contains("Ref"), "Instruction {} contains forbidden 'Ref' terminology", name);
        }
    }

    #[test]
    fn test_rc_sensitive_list_exists() {
        // Required by PR-06: Documentation test or unit assertion that the RC-sensitive list exists
        assert!(!RC_SENSITIVE_OPS.is_empty(), "RC-sensitive instructions list must not be empty");

        let expected = ["LocalStore", "GateStore", "GateLoad", "Pop", "Ret", "FrameSync"];
        for op in expected {
            assert!(RC_SENSITIVE_OPS.contains(&op), "RC-sensitive list must contain {}", op);
        }
    }
}
|
|
||||||
@ -1,177 +0,0 @@
|
|||||||
//! # VM Intermediate Representation (ir_lang)
|
|
||||||
//!
|
|
||||||
//! This module defines the Intermediate Representation for the Prometeu VM.
|
|
||||||
//!
|
|
||||||
//! ## Memory Model
|
|
||||||
//!
|
|
||||||
//! * Heap is never directly addressable.
|
|
||||||
//! * All HIP (Heap) access is mediated via Gate Pool resolution.
|
|
||||||
//! * `Gate(GateId)` is the only HIP pointer form in `ir_lang`.
|
|
||||||
//!
|
|
||||||
//! ## Reference Counting (RC)
|
|
||||||
//!
|
|
||||||
//! The VM uses Reference Counting to manage HIP memory.
|
|
||||||
//!
|
|
||||||
//! ### RC Rules:
|
|
||||||
//! * **Retain**: Increment `strong_rc` when a gate handle is copied.
|
|
||||||
//! * **Release**: Decrement `strong_rc` when a gate handle is overwritten or dropped.
|
|
||||||
//!
|
|
||||||
//! ### RC-Sensitive Instructions:
|
|
||||||
//! The following instructions are RC-sensitive and must trigger RC updates:
|
|
||||||
//! * `LocalStore`: Release old value, retain new value.
|
|
||||||
//! * `GateStore`: Release old value, retain new value.
|
|
||||||
//! * `Pop`: Release the popped value.
|
|
||||||
//! * `Ret`: Release all live locals in the frame.
|
|
||||||
//! * `FrameSync`: Safe point; reclamation occurs after this point.
|
|
||||||
|
|
||||||
pub mod types;
|
|
||||||
pub mod module;
|
|
||||||
pub mod instr;
|
|
||||||
pub mod validate;
|
|
||||||
|
|
||||||
pub use instr::{InstrKind, Instruction, Label};
|
|
||||||
pub use module::{Function, Global, Module, Param};
|
|
||||||
pub use types::{ConstId, GateId, Type, TypeId, Value};
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use crate::ir_core::const_pool::{ConstPool, ConstantValue};
|
|
||||||
use crate::ir_core::ids::{ConstId, FunctionId};
|
|
||||||
use serde_json;
|
|
||||||
|
|
||||||
#[test]
fn test_vm_ir_serialization() {
    // Snapshot test: the pretty-printed JSON encoding of a small module must
    // stay stable, since external tooling consumes this format.
    let mut pool = ConstPool::new();
    pool.insert(ConstantValue::String("Hello VM".to_string()));

    let main_fn = Function {
        id: FunctionId(1),
        name: "main".to_string(),
        sig: crate::ir_core::SigId(0),
        param_slots: 0,
        local_slots: 0,
        return_slots: 0,
        params: vec![],
        return_type: Type::Null,
        body: vec![
            Instruction::new(InstrKind::PushConst(types::ConstId(0)), None),
            Instruction::new(InstrKind::Call { func_id: FunctionId(2), arg_count: 1 }, None),
            Instruction::new(InstrKind::Ret, None),
        ],
    };

    let module = Module {
        name: "test_module".to_string(),
        const_pool: pool,
        functions: vec![main_fn],
        globals: vec![],
    };

    let json = serde_json::to_string_pretty(&module).unwrap();

    let expected = r#"{
  "name": "test_module",
  "const_pool": {
    "constants": [
      {
        "String": "Hello VM"
      }
    ]
  },
  "functions": [
    {
      "id": 1,
      "name": "main",
      "params": [],
      "return_type": "Null",
      "body": [
        {
          "kind": {
            "PushConst": 0
          },
          "span": null
        },
        {
          "kind": {
            "Call": {
              "func_id": 2,
              "arg_count": 1
            }
          },
          "span": null
        },
        {
          "kind": "Ret",
          "span": null
        }
      ],
      "param_slots": 0,
      "local_slots": 0,
      "return_slots": 0
    }
  ],
  "globals": []
}"#;
    assert_eq!(json, expected);
}
|
|
||||||
|
|
||||||
#[test]
fn test_lowering_smoke() {
    use crate::ir_core;
    use crate::lowering::lower_program;

    let mut pool = ir_core::ConstPool::new();
    pool.insert(ir_core::ConstantValue::Int(42));

    // Intern a `fn(): void` signature for the test function.
    let sig_id = {
        let mut interner = crate::ir_core::global_signature_interner().lock().unwrap();
        interner.intern(crate::ir_core::Signature {
            params: vec![],
            return_type: ir_core::Type::Void,
        })
    };

    // A single block that pushes one constant and returns.
    let entry_block = ir_core::Block {
        id: 0,
        instrs: vec![ir_core::Instr::from(ir_core::InstrKind::PushConst(ConstId(0)))],
        terminator: ir_core::Terminator::Return,
    };

    let program = ir_core::Program {
        const_pool: pool,
        modules: vec![ir_core::Module {
            name: "test_core".to_string(),
            functions: vec![ir_core::Function {
                id: FunctionId(10),
                name: "start".to_string(),
                sig: sig_id,
                param_slots: 0,
                local_slots: 0,
                return_slots: 0,
                params: vec![],
                return_type: ir_core::Type::Void,
                blocks: vec![entry_block],
                local_types: std::collections::HashMap::new(),
            }],
        }],
        field_offsets: std::collections::HashMap::new(),
        field_types: std::collections::HashMap::new(),
    };

    let vm_module = lower_program(&program).expect("Lowering failed");

    assert_eq!(vm_module.name, "test_core");
    assert_eq!(vm_module.functions.len(), 1);

    let func = &vm_module.functions[0];
    assert_eq!(func.name, "start");
    assert_eq!(func.id, FunctionId(10));

    // Lowering flattens the single block into: Label, PushConst, Ret.
    assert_eq!(func.body.len(), 3);
    match &func.body[0].kind {
        InstrKind::Label(Label(l)) => assert!(l.contains("block_0")),
        _ => panic!("Expected label"),
    }
    match &func.body[1].kind {
        InstrKind::PushConst(id) => assert_eq!(id.0, 0),
        _ => panic!("Expected PushConst"),
    }
    match &func.body[2].kind {
        InstrKind::Ret => (),
        _ => panic!("Expected Ret"),
    }
}
|
|
||||||
}
|
|
||||||
@ -1,82 +0,0 @@
|
|||||||
//! # IR Module Structure
|
|
||||||
//!
|
|
||||||
//! This module defines the structure of the Intermediate Representation (IR).
|
|
||||||
//! The IR is a higher-level representation of the program than bytecode, but lower
|
|
||||||
//! than the source code AST. It is organized into Modules, Functions, and Globals.
|
|
||||||
|
|
||||||
use crate::ir_core::const_pool::ConstPool;
|
|
||||||
use crate::ir_core::ids::{FunctionId, SigId};
|
|
||||||
use crate::ir_lang::instr::Instruction;
|
|
||||||
use crate::ir_lang::types::Type;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// A `Module` is the top-level container for a compiled program or library.
/// It contains a collection of global variables, functions, and a constant pool.
///
/// Serialized with `serde`; field order here defines the JSON layout that the
/// serialization snapshot tests lock down.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Module {
    /// The name of the module (usually derived from the project name).
    pub name: String,
    /// Shared constant pool for this module.
    pub const_pool: ConstPool,
    /// List of all functions defined in this module.
    pub functions: Vec<Function>,
    /// List of all global variables available in this module.
    pub globals: Vec<Global>,
}
|
|
||||||
|
|
||||||
/// Represents a function in the IR.
///
/// Functions consist of a signature (name, parameters, return type) and a body
/// which is a flat list of IR instructions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Function {
    /// The unique identifier of the function.
    pub id: FunctionId,
    /// The unique name of the function.
    pub name: String,
    /// Canonical signature id (params + return type). Not serialized yet.
    #[serde(skip)]
    pub sig: SigId,
    /// The list of input parameters.
    pub params: Vec<Param>,
    /// The type of value this function returns.
    pub return_type: Type,
    /// The sequence of instructions that make up the function's logic.
    pub body: Vec<Instruction>,

    // Frame-layout bookkeeping, copied verbatim from the core IR by lowering.
    /// Number of frame slots occupied by parameters.
    pub param_slots: u16,
    /// Number of frame slots reserved for locals.
    pub local_slots: u16,
    /// Number of frame slots reserved for return values.
    pub return_slots: u16,
}
|
|
||||||
|
|
||||||
/// A parameter passed to a function.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Param {
    /// The name of the parameter (useful for debugging and symbols).
    pub name: String,
    /// The data type of the parameter.
    pub r#type: Type,
}
|
|
||||||
|
|
||||||
/// A global variable accessible by any function in the module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Global {
    /// The name of the global variable.
    pub name: String,
    /// The data type of the variable.
    pub r#type: Type,
    /// The unique memory slot index assigned to this global variable.
    pub slot: u32,
}
|
|
||||||
|
|
||||||
impl Module {
|
|
||||||
/// Creates a new, empty module with the given name.
|
|
||||||
pub fn new(name: String) -> Self {
|
|
||||||
Self {
|
|
||||||
name,
|
|
||||||
const_pool: ConstPool::new(),
|
|
||||||
functions: Vec::new(),
|
|
||||||
globals: Vec::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,88 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Identifier of a gate (heap handle) in the Gate Pool. Per the module docs,
/// `Gate(GateId)` is the only HIP pointer form in `ir_lang`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct GateId(pub u32);

/// Index of an entry in the module's constant pool.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ConstId(pub u32);

/// Identifier of a type known to the VM.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(transparent)]
pub struct TypeId(pub u32);
|
|
||||||
|
|
||||||
/// A runtime value as represented in the VM IR.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum Value {
    /// 64-bit signed integer.
    Int(i64),
    /// 64-bit floating-point number.
    Float(f64),
    /// Bounded numeric value (raw `u32` payload).
    Bounded(u32),
    /// Boolean value.
    Bool(bool),
    /// The unit value (carries no data).
    Unit,
    /// Reference to an entry in the constant pool.
    Const(ConstId),
    /// Gate handle — the only HIP (heap) pointer form in `ir_lang`.
    Gate(GateId),
}
|
|
||||||
|
|
||||||
/// Static types known to the VM IR.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum Type {
    /// Dynamically typed / unknown type.
    Any,
    Null,
    Bool,
    Int,
    /// Bounded numeric type (see `Value::Bounded`).
    Bounded,
    Float,
    String,
    Color,
    /// Homogeneous array of the boxed element type.
    Array(Box<Type>),
    Object,
    Function,
    /// No value (e.g. functions that return nothing).
    Void,
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
|
|
||||||
#[test]
fn test_ids_implement_required_traits() {
    // Compile-time trait checks: the id newtypes must stay cheap to copy
    // and usable as hash-map keys.
    fn assert_copy<T: Copy>() {}
    fn assert_eq_hash<T: Eq + std::hash::Hash>() {}

    assert_copy::<GateId>();
    assert_copy::<ConstId>();
    assert_copy::<TypeId>();

    assert_eq_hash::<GateId>();
    assert_eq_hash::<ConstId>();
    assert_eq_hash::<TypeId>();
}
|
|
||||||
|
|
||||||
#[test]
fn test_gate_id_usage() {
    let first = GateId(1);
    let same = GateId(1);
    let other = GateId(2);

    // Structural equality follows the wrapped u32.
    assert_eq!(first, same);
    assert_ne!(first, other);

    // Ids must work as set/map keys.
    let mut seen = HashSet::new();
    seen.insert(first);
    assert!(seen.contains(&same));
}
|
|
||||||
|
|
||||||
#[test]
fn test_value_gate_exists_and_is_clonable() {
    // `Value::Gate` must exist and cloning it must preserve the wrapped id.
    let gate_id = GateId(42);
    let original = Value::Gate(gate_id);

    match original.clone() {
        Value::Gate(id) => assert_eq!(id, gate_id),
        _ => panic!("Expected Value::Gate"),
    }
}
|
|
||||||
}
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::ir_lang::module::Module;
|
|
||||||
|
|
||||||
/// Validates a VM IR module.
///
/// Currently a stub that always returns `Ok(())`; the checks below are planned.
pub fn validate_module(_module: &Module) -> Result<(), DiagnosticBundle> {
    // TODO: Implement common IR validations:
    // - Type checking rules
    // - HostCall signatures
    // - VM invariants
    Ok(())
}
|
|
||||||
@ -1,156 +0,0 @@
|
|||||||
//! # Prometeu Compiler
|
|
||||||
//!
|
|
||||||
//! This crate provides the official compiler for the Prometeu ecosystem.
|
|
||||||
//! It translates high-level source code (primarily Prometeu Base Script - PBS) into
|
|
||||||
//! Prometeu ByteCode (.pbc), which runs on the Prometeu Virtual Machine.
|
|
||||||
//!
|
|
||||||
//! ## Architecture Overview:
|
|
||||||
//!
|
|
||||||
//! The compiler follows a multi-stage pipeline:
|
|
||||||
//!
|
|
||||||
//! 1. **Frontend (Parsing & Analysis)**:
|
|
||||||
//! - Uses the PBS parser to generate an Abstract Syntax Tree (AST).
|
|
||||||
//! - Performs semantic analysis and validation.
|
|
||||||
//! - Lowers the AST into the **Intermediate Representation (IR)**.
|
|
||||||
//! - *Example*: Converting a `a + b` expression into IR instructions like `Push(a)`, `Push(b)`, `Add`.
|
|
||||||
//!
|
|
||||||
//! 2. **IR Optimization (Optional/Planned)**:
|
|
||||||
//! - Simplifies the IR to improve performance or reduce bytecode size.
|
|
||||||
//!
|
|
||||||
//! 3. **Backend (Code Generation)**:
|
|
||||||
//! - **Lowering**: Converts the high-level IR into a flat list of ByteCode instructions.
|
|
||||||
//! - **Assembly**: Resolves branch labels into actual memory offsets.
|
|
||||||
//! - **Serialization**: Encodes the instructions into the binary PBC format.
|
|
||||||
//!
|
|
||||||
//! 4. **Artifact Export**:
|
|
||||||
//! - Generates the `.pbc` binary file.
|
|
||||||
//! - Optionally produces `.disasm` (disassembly for debugging) and `.json` (symbol maps).
|
|
||||||
//!
|
|
||||||
//! ## Example Usage (CLI):
|
|
||||||
//!
|
|
||||||
//! ```bash
|
|
||||||
//! # Build a project from a directory
|
|
||||||
//! prometeu-compiler build ./my-game --entry ./src/main.pbs --out ./game.pbc
|
|
||||||
//! ```
|
|
||||||
//!
|
|
||||||
//! ## Programmatic Entry Point:
|
|
||||||
//!
|
|
||||||
//! See the [`compiler`] module for the main entry point to trigger a compilation programmatically.
|
|
||||||
|
|
||||||
pub mod common;
|
|
||||||
pub mod ir_lang;
|
|
||||||
pub mod ir_core;
|
|
||||||
pub mod lowering;
|
|
||||||
pub mod backend;
|
|
||||||
pub mod frontends;
|
|
||||||
pub mod compiler;
|
|
||||||
pub mod manifest;
|
|
||||||
pub mod deps;
|
|
||||||
pub mod sources;
|
|
||||||
pub mod building;
|
|
||||||
pub mod semantics;
|
|
||||||
pub mod analysis;
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use clap::{Parser, Subcommand};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
/// Command line interface for the Prometeu Compiler.
// NOTE: the `///` comments inside this struct double as clap-generated help
// text, so they are kept exactly as shipped.
#[derive(Parser)]
#[command(name = "prometeu-compiler")]
#[command(version, about = "Official compiler for the PROMETEU Virtual Machine", long_about = None)]
pub struct Cli {
    /// The action to perform (build or verify).
    #[command(subcommand)]
    pub command: Commands,
}
|
|
||||||
|
|
||||||
/// Available subcommands for the compiler.
// NOTE: the `///` comments on variants and fields double as clap help text,
// so they are kept exactly as shipped.
#[derive(Subcommand)]
pub enum Commands {
    /// Builds a Prometeu project by compiling source code into a PBC file.
    Build {
        /// Path to the project root directory.
        project_dir: PathBuf,

        /// Explicit path to the entry file (defaults to src/main.pbs).
        #[arg(short, long)]
        entry: Option<PathBuf>,

        /// Path to save the compiled .pbc file.
        #[arg(short, long)]
        out: Option<PathBuf>,

        // The emission flags come in default-on / `--no-*` pairs; `run()`
        // combines each pair as `emit_x && !no_x`.
        /// Whether to generate a .json symbols file for source mapping.
        #[arg(long, default_value_t = true)]
        emit_symbols: bool,

        /// Disable symbol generation.
        #[arg(long)]
        no_symbols: bool,

        /// Whether to generate a .disasm file for debugging.
        #[arg(long, default_value_t = true)]
        emit_disasm: bool,

        /// Disable disassembly generation.
        #[arg(long)]
        no_disasm: bool,

        /// Whether to explain the dependency resolution process.
        #[arg(long)]
        explain_deps: bool,
    },
    /// Verifies if a Prometeu project is syntactically and semantically valid without emitting code.
    Verify {
        /// Path to the project root directory.
        project_dir: PathBuf,

        /// Whether to explain the dependency resolution process.
        #[arg(long)]
        explain_deps: bool,
    },
}
|
|
||||||
|
|
||||||
/// Main entry point for the compiler library's execution logic.
|
|
||||||
/// Parses CLI arguments and dispatches to the appropriate compiler functions.
|
|
||||||
pub fn run() -> Result<()> {
|
|
||||||
let cli = Cli::parse();
|
|
||||||
|
|
||||||
match cli.command {
|
|
||||||
Commands::Build {
|
|
||||||
project_dir,
|
|
||||||
out,
|
|
||||||
emit_disasm,
|
|
||||||
no_disasm,
|
|
||||||
emit_symbols,
|
|
||||||
no_symbols,
|
|
||||||
explain_deps,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
let build_dir = project_dir.join("../../../../build");
|
|
||||||
let out = out.unwrap_or_else(|| build_dir.join("program.pbc"));
|
|
||||||
|
|
||||||
let emit_symbols = emit_symbols && !no_symbols;
|
|
||||||
let emit_disasm = emit_disasm && !no_disasm;
|
|
||||||
|
|
||||||
if !build_dir.exists() {
|
|
||||||
std::fs::create_dir_all(&build_dir)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("Building project at {:?}", project_dir);
|
|
||||||
println!("Output: {:?}", out);
|
|
||||||
|
|
||||||
let compilation_unit = compiler::compile_ext(&project_dir, explain_deps)?;
|
|
||||||
compilation_unit.export(&out, emit_disasm, emit_symbols)?;
|
|
||||||
}
|
|
||||||
Commands::Verify { project_dir, explain_deps } => {
|
|
||||||
println!("Verifying project at {:?}", project_dir);
|
|
||||||
|
|
||||||
compiler::compile_ext(&project_dir, explain_deps)?;
|
|
||||||
println!("Project is valid!");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@ -1,760 +0,0 @@
|
|||||||
use crate::ir_core;
|
|
||||||
use crate::ir_lang;
|
|
||||||
use anyhow::Result;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
/// Lowers a Core IR program into a VM IR module.
|
|
||||||
pub fn lower_program(program: &ir_core::Program) -> Result<ir_lang::Module> {
|
|
||||||
// Build a map of function return types for type tracking
|
|
||||||
let mut function_returns = HashMap::new();
|
|
||||||
for module in &program.modules {
|
|
||||||
for func in &module.functions {
|
|
||||||
function_returns.insert(func.id, func.return_type.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For now, we assume a single module program or lower the first one.
|
|
||||||
if let Some(core_module) = program.modules.first() {
|
|
||||||
lower_module(core_module, program, &function_returns)
|
|
||||||
} else {
|
|
||||||
anyhow::bail!("No modules in core program")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Lowers a single Core IR module into a VM IR module.
|
|
||||||
pub fn lower_module(
|
|
||||||
core_module: &ir_core::Module,
|
|
||||||
program: &ir_core::Program,
|
|
||||||
function_returns: &HashMap<ir_core::ids::FunctionId, ir_core::Type>,
|
|
||||||
) -> Result<ir_lang::Module> {
|
|
||||||
let mut vm_module = ir_lang::Module::new(core_module.name.clone());
|
|
||||||
vm_module.const_pool = program.const_pool.clone();
|
|
||||||
|
|
||||||
for core_func in &core_module.functions {
|
|
||||||
// Detect the PBS entry point heuristically by (function name + signature)
|
|
||||||
// This matches fn frame(): void anywhere. In practice for v0 tests, only main.pbs defines it.
|
|
||||||
let is_entry_point = core_func.name == "frame"
|
|
||||||
&& core_func.params.is_empty()
|
|
||||||
&& matches!(core_func.return_type, ir_core::Type::Void);
|
|
||||||
|
|
||||||
vm_module
|
|
||||||
.functions
|
|
||||||
.push(lower_function(core_func, program, function_returns, is_entry_point)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(vm_module)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Lowers a Core IR function into a VM IR function.
|
|
||||||
pub fn lower_function(
|
|
||||||
core_func: &ir_core::Function,
|
|
||||||
program: &ir_core::Program,
|
|
||||||
function_returns: &HashMap<ir_core::ids::FunctionId, ir_core::Type>,
|
|
||||||
is_entry_point: bool,
|
|
||||||
) -> Result<ir_lang::Function> {
|
|
||||||
let mut vm_func = ir_lang::Function {
|
|
||||||
id: core_func.id,
|
|
||||||
name: core_func.name.clone(),
|
|
||||||
sig: core_func.sig,
|
|
||||||
params: core_func.params.iter().map(|p| ir_lang::Param {
|
|
||||||
name: p.name.clone(),
|
|
||||||
r#type: lower_type(&p.ty),
|
|
||||||
}).collect(),
|
|
||||||
return_type: lower_type(&core_func.return_type),
|
|
||||||
body: vec![],
|
|
||||||
param_slots: core_func.param_slots,
|
|
||||||
local_slots: core_func.local_slots,
|
|
||||||
return_slots: core_func.return_slots,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Type tracking for RC insertion
|
|
||||||
let mut local_types = HashMap::new();
|
|
||||||
// Populate with parameter types
|
|
||||||
for (i, param) in core_func.params.iter().enumerate() {
|
|
||||||
local_types.insert(i as u32, param.ty.clone());
|
|
||||||
}
|
|
||||||
// Also use the pre-computed local types from ir_core if available
|
|
||||||
for (slot, ty) in &core_func.local_types {
|
|
||||||
local_types.insert(*slot, ty.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
for block in &core_func.blocks {
|
|
||||||
// Core blocks map to labels in the flat VM IR instruction list.
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(
|
|
||||||
ir_lang::InstrKind::Label(ir_lang::Label(format!("block_{}", block.id))),
|
|
||||||
None,
|
|
||||||
));
|
|
||||||
|
|
||||||
// Note: For multi-block functions, we should ideally track stack types across blocks.
|
|
||||||
// For v0, we assume each block starts with an empty stack in terms of types,
|
|
||||||
// which matches how PBS frontend generates code for now.
|
|
||||||
let mut stack_types = Vec::new();
|
|
||||||
|
|
||||||
for instr in &block.instrs {
|
|
||||||
let span = instr.span.clone();
|
|
||||||
match &instr.kind {
|
|
||||||
ir_core::InstrKind::PushConst(id) => {
|
|
||||||
let ty = if let Some(val) = program.const_pool.get(ir_core::ConstId(id.0)) {
|
|
||||||
match val {
|
|
||||||
ir_core::ConstantValue::Int(_) => ir_core::Type::Int,
|
|
||||||
ir_core::ConstantValue::Float(_) => ir_core::Type::Float,
|
|
||||||
ir_core::ConstantValue::String(_) => ir_core::Type::String,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ir_core::Type::Void
|
|
||||||
};
|
|
||||||
stack_types.push(ty);
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::PushConst(ir_lang::ConstId(id.0)), span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::PushBounded(val) => {
|
|
||||||
stack_types.push(ir_core::Type::Bounded);
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::PushBounded(*val), span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Call(func_id, arg_count) => {
|
|
||||||
// Pop arguments from type stack
|
|
||||||
for _ in 0..*arg_count {
|
|
||||||
stack_types.pop();
|
|
||||||
}
|
|
||||||
// Push return type
|
|
||||||
let ret_ty = function_returns.get(func_id).cloned().unwrap_or(ir_core::Type::Void);
|
|
||||||
stack_types.push(ret_ty);
|
|
||||||
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Call {
|
|
||||||
func_id: *func_id,
|
|
||||||
arg_count: *arg_count
|
|
||||||
}, None));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::ImportCall { dep_alias, module_path, owner, base_name, sig, arg_count } => {
|
|
||||||
// Pop arguments from type stack
|
|
||||||
for _ in 0..*arg_count {
|
|
||||||
stack_types.pop();
|
|
||||||
}
|
|
||||||
// Do not assume a return type here; VM semantics should be verified elsewhere.
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::ImportCall {
|
|
||||||
dep_alias: dep_alias.clone(),
|
|
||||||
module_path: module_path.clone(),
|
|
||||||
owner: owner.clone(),
|
|
||||||
base_name: base_name.clone(),
|
|
||||||
sig: *sig,
|
|
||||||
arg_count: *arg_count,
|
|
||||||
}, None));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::HostCall(id, slots) => {
|
|
||||||
// HostCall return types are not easily known without a registry,
|
|
||||||
// but we now pass the number of slots.
|
|
||||||
for _ in 0..*slots {
|
|
||||||
stack_types.push(ir_core::Type::Int);
|
|
||||||
}
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Syscall(*id), span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::GetLocal(slot) => {
|
|
||||||
let ty = local_types.get(slot).cloned().unwrap_or(ir_core::Type::Void);
|
|
||||||
stack_types.push(ty.clone());
|
|
||||||
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: *slot }, span.clone()));
|
|
||||||
|
|
||||||
// If it's a gate, we should retain it if we just pushed it onto stack?
|
|
||||||
// "on assigning a gate to a local/global"
|
|
||||||
// "on overwriting a local/global holding a gate"
|
|
||||||
// "on popping/dropping gate temporaries"
|
|
||||||
|
|
||||||
// Wait, if I Load it, I have a new handle on the stack. I should Retain it.
|
|
||||||
if is_gate_type(&ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::SetLocal(slot) => {
|
|
||||||
let new_ty = stack_types.pop().unwrap_or(ir_core::Type::Void);
|
|
||||||
let old_ty = local_types.get(slot).cloned();
|
|
||||||
|
|
||||||
// 1. Release old value if it was a gate
|
|
||||||
if let Some(old_ty) = old_ty {
|
|
||||||
if is_gate_type(&old_ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: *slot }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. The new value is already on stack.
|
|
||||||
// We don't need to Retain it here because it was either just created (Alloc)
|
|
||||||
// or just Loaded (which already did a Retain).
|
|
||||||
// Wait, if it was just Loaded, it has +1. If we store it, it stays +1.
|
|
||||||
// If it was just Alocated, it has +1. If we store it, it stays +1.
|
|
||||||
|
|
||||||
// Actually, if we Pop it later, we Release it.
|
|
||||||
|
|
||||||
local_types.insert(*slot, new_ty);
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalStore { slot: *slot }, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Pop => {
|
|
||||||
let ty = stack_types.pop().unwrap_or(ir_core::Type::Void);
|
|
||||||
if is_gate_type(&ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
} else {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Pop, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Dup => {
|
|
||||||
let ty = stack_types.last().cloned().unwrap_or(ir_core::Type::Void);
|
|
||||||
stack_types.push(ty.clone());
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Dup, span.clone()));
|
|
||||||
if is_gate_type(&ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Add | ir_core::InstrKind::Sub | ir_core::InstrKind::Mul | ir_core::InstrKind::Div => {
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.push(ir_core::Type::Int); // Assume Int for arithmetic
|
|
||||||
let kind = match &instr.kind {
|
|
||||||
ir_core::InstrKind::Add => ir_lang::InstrKind::Add,
|
|
||||||
ir_core::InstrKind::Sub => ir_lang::InstrKind::Sub,
|
|
||||||
ir_core::InstrKind::Mul => ir_lang::InstrKind::Mul,
|
|
||||||
ir_core::InstrKind::Div => ir_lang::InstrKind::Div,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(kind, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Neg => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Neg, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Eq | ir_core::InstrKind::Neq | ir_core::InstrKind::Lt | ir_core::InstrKind::Lte | ir_core::InstrKind::Gt | ir_core::InstrKind::Gte => {
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.push(ir_core::Type::Bool);
|
|
||||||
let kind = match &instr.kind {
|
|
||||||
ir_core::InstrKind::Eq => ir_lang::InstrKind::Eq,
|
|
||||||
ir_core::InstrKind::Neq => ir_lang::InstrKind::Neq,
|
|
||||||
ir_core::InstrKind::Lt => ir_lang::InstrKind::Lt,
|
|
||||||
ir_core::InstrKind::Lte => ir_lang::InstrKind::Lte,
|
|
||||||
ir_core::InstrKind::Gt => ir_lang::InstrKind::Gt,
|
|
||||||
ir_core::InstrKind::Gte => ir_lang::InstrKind::Gte,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(kind, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::And | ir_core::InstrKind::Or => {
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.push(ir_core::Type::Bool);
|
|
||||||
let kind = match &instr.kind {
|
|
||||||
ir_core::InstrKind::And => ir_lang::InstrKind::And,
|
|
||||||
ir_core::InstrKind::Or => ir_lang::InstrKind::Or,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(kind, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Not => {
|
|
||||||
stack_types.pop();
|
|
||||||
stack_types.push(ir_core::Type::Bool);
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Not, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Alloc { ty, slots } => {
|
|
||||||
stack_types.push(ir_core::Type::Contract(format!("Gate<{}>", ty.0)));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Alloc {
|
|
||||||
type_id: ir_lang::TypeId(ty.0),
|
|
||||||
slots: *slots
|
|
||||||
}, None));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::BeginPeek { gate } => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateBeginPeek, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::BeginBorrow { gate } => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateBeginBorrow, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::BeginMutate { gate } => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateBeginMutate, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::EndPeek => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateEndPeek, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::EndBorrow => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateEndBorrow, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::EndMutate => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateEndMutate, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::GateLoadField { gate, field } => {
|
|
||||||
let offset = program.field_offsets.get(field)
|
|
||||||
.ok_or_else(|| anyhow::anyhow!("E_LOWER_UNRESOLVED_OFFSET: Field {:?} offset cannot be resolved", field))?;
|
|
||||||
|
|
||||||
let field_ty = program.field_types.get(field).cloned().unwrap_or(ir_core::Type::Int);
|
|
||||||
stack_types.push(field_ty.clone());
|
|
||||||
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateLoad { offset: *offset }, span.clone()));
|
|
||||||
|
|
||||||
if is_gate_type(&field_ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::GateStoreField { gate, field, value } => {
|
|
||||||
let offset = program.field_offsets.get(field)
|
|
||||||
.ok_or_else(|| anyhow::anyhow!("E_LOWER_UNRESOLVED_OFFSET: Field {:?} offset cannot be resolved", field))?;
|
|
||||||
|
|
||||||
let field_ty = program.field_types.get(field).cloned().unwrap_or(ir_core::Type::Int);
|
|
||||||
|
|
||||||
// 1. Release old value in HIP if it was a gate
|
|
||||||
if is_gate_type(&field_ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateLoad { offset: *offset }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, span.clone()));
|
|
||||||
}
|
|
||||||
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: gate.0 }, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: value.0 }, span.clone()));
|
|
||||||
|
|
||||||
// 2. Retain new value if it's a gate
|
|
||||||
if let Some(val_ty) = local_types.get(&value.0) {
|
|
||||||
if is_gate_type(val_ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRetain, span.clone()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateStore { offset: *offset }, span.clone()));
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::GateLoadIndex { .. } => {
|
|
||||||
anyhow::bail!("E_LOWER_UNSUPPORTED: Dynamic HIP index access not supported in v0 lowering");
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::GateStoreIndex { .. } => {
|
|
||||||
anyhow::bail!("E_LOWER_UNSUPPORTED: Dynamic HIP index access not supported in v0 lowering");
|
|
||||||
}
|
|
||||||
ir_core::InstrKind::Free => anyhow::bail!("Instruction 'Free' cannot be represented in ir_lang v0"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
match &block.terminator {
|
|
||||||
ir_core::Terminator::Return => {
|
|
||||||
// Release all live locals that hold gates
|
|
||||||
let mut sorted_slots: Vec<_> = local_types.keys().collect();
|
|
||||||
sorted_slots.sort();
|
|
||||||
|
|
||||||
for slot in sorted_slots {
|
|
||||||
let ty = &local_types[slot];
|
|
||||||
if is_gate_type(ty) {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::LocalLoad { slot: *slot }, None));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::GateRelease, None));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Inject FRAME_SYNC immediately before RET only for the entry point.
|
|
||||||
// This is a signal-only safe point; no GC opcodes should be emitted here.
|
|
||||||
if is_entry_point {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::FrameSync, None));
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the function is Void, we don't need to push anything.
|
|
||||||
// The VM's Ret opcode handles zero return slots correctly.
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(ir_lang::InstrKind::Ret, None));
|
|
||||||
}
|
|
||||||
ir_core::Terminator::Jump(target) => {
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(
|
|
||||||
ir_lang::InstrKind::Jmp(ir_lang::Label(format!("block_{}", target))),
|
|
||||||
None,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
ir_core::Terminator::JumpIfFalse { target, else_target } => {
|
|
||||||
stack_types.pop();
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(
|
|
||||||
ir_lang::InstrKind::JmpIfFalse(ir_lang::Label(format!("block_{}", target))),
|
|
||||||
None,
|
|
||||||
));
|
|
||||||
vm_func.body.push(ir_lang::Instruction::new(
|
|
||||||
ir_lang::InstrKind::Jmp(ir_lang::Label(format!("block_{}", else_target))),
|
|
||||||
None,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(vm_func)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Note: Unit tests for full lowering already exist below. End-to-end tests for
|
|
||||||
// FRAME_SYNC injection are provided in the orchestrator tests.
|
|
||||||
|
|
||||||
/// Reports whether a core IR type is a gate handle — i.e. a contract type
/// whose name is spelled `Gate<...>`. Only such values take part in the
/// retain/release instructions emitted during lowering.
fn is_gate_type(ty: &ir_core::Type) -> bool {
    matches!(ty, ir_core::Type::Contract(name) if name.starts_with("Gate<"))
}
|
|
||||||
|
|
||||||
/// Maps a core IR type onto its VM-level (`ir_lang`) representation.
///
/// Scalars map one-to-one. `Optional(T)` lowers to an array of the lowered
/// inner type; `Result(Ok, _)` lowers to its Ok type. All nominal types
/// (structs, services, contracts, error types) become opaque VM objects.
fn lower_type(ty: &ir_core::Type) -> ir_lang::Type {
    match ty {
        ir_core::Type::Void => ir_lang::Type::Void,
        ir_core::Type::Int => ir_lang::Type::Int,
        ir_core::Type::Float => ir_lang::Type::Float,
        ir_core::Type::Bool => ir_lang::Type::Bool,
        ir_core::Type::String => ir_lang::Type::String,
        ir_core::Type::Bounded => ir_lang::Type::Bounded,
        // Optionals are represented as arrays of the inner lowered type.
        ir_core::Type::Optional(inner) => ir_lang::Type::Array(Box::new(lower_type(inner))),
        // Result collapses to its Ok type in the VM IR.
        ir_core::Type::Result(ok, _) => lower_type(ok),
        ir_core::Type::Struct(_) => ir_lang::Type::Object,
        ir_core::Type::Service(_) => ir_lang::Type::Object,
        ir_core::Type::Contract(_) => ir_lang::Type::Object,
        ir_core::Type::ErrorType(_) => ir_lang::Type::Object,
        ir_core::Type::Function { .. } => ir_lang::Type::Function,
        ir_core::Type::Array(inner, _) => ir_lang::Type::Array(Box::new(lower_type(inner))),
    }
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ir_core;
    use crate::ir_core::ids::{ConstId as CoreConstId, FunctionId};
    use crate::ir_core::{Block, ConstPool, ConstantValue, Instr, InstrKind, Program, Terminator};
    use crate::ir_lang::{InstrKind as VmInstrKind, Label};

    // End-to-end check of the lowering pipeline on a two-block function:
    // labels, constant pushes, calls, jumps, host calls and the trailing Ret
    // must come out in the expected order.
    #[test]
    fn test_full_lowering() {
        let mut const_pool = ConstPool::new();
        const_pool.insert(ConstantValue::Int(100)); // ConstId(0)

        let program = Program {
            const_pool,
            modules: vec![ir_core::Module {
                name: "test_mod".to_string(),
                functions: vec![ir_core::Function {
                    id: FunctionId(1),
                    name: "main".to_string(),
                    sig: {
                        let mut i = ir_core::global_signature_interner().lock().unwrap();
                        i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
                    },
                    params: vec![],
                    return_type: ir_core::Type::Void,
                    blocks: vec![
                        Block {
                            id: 0,
                            instrs: vec![
                                Instr::from(InstrKind::PushConst(CoreConstId(0))),
                                Instr::from(InstrKind::Call(FunctionId(2), 1)),
                            ],
                            terminator: Terminator::Jump(1),
                        },
                        Block {
                            id: 1,
                            instrs: vec![
                                Instr::from(InstrKind::HostCall(42, 1)),
                            ],
                            terminator: Terminator::Return,
                        },
                    ],
                    local_types: HashMap::new(),
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                }],
            }],
            field_offsets: std::collections::HashMap::new(),
            field_types: std::collections::HashMap::new(),
        };

        let vm_module = lower_program(&program).expect("Lowering failed");

        assert_eq!(vm_module.name, "test_mod");
        let func = &vm_module.functions[0];
        assert_eq!(func.name, "main");

        // Label, PushConst, Call, Jmp, Label, Syscall, Ret.
        assert_eq!(func.body.len(), 7);

        match &func.body[0].kind {
            VmInstrKind::Label(Label(l)) => assert_eq!(l, "block_0"),
            _ => panic!("Expected label block_0"),
        }
        match &func.body[1].kind {
            VmInstrKind::PushConst(id) => assert_eq!(id.0, 0),
            _ => panic!("Expected PushConst 0"),
        }
        match &func.body[2].kind {
            VmInstrKind::Call { func_id, arg_count } => {
                assert_eq!(func_id.0, 2);
                assert_eq!(*arg_count, 1);
            }
            _ => panic!("Expected Call"),
        }
        match &func.body[3].kind {
            VmInstrKind::Jmp(Label(l)) => assert_eq!(l, "block_1"),
            _ => panic!("Expected Jmp block_1"),
        }
        match &func.body[4].kind {
            VmInstrKind::Label(Label(l)) => assert_eq!(l, "block_1"),
            _ => panic!("Expected label block_1"),
        }
        match &func.body[5].kind {
            // Core HostCall lowers to a VM Syscall with the same host id.
            VmInstrKind::Syscall(id) => assert_eq!(*id, 42),
            _ => panic!("Expected HostCall 42"),
        }
        match &func.body[6].kind {
            VmInstrKind::Ret => (),
            _ => panic!("Expected Ret"),
        }
    }

    // Golden test: field load/store must expand into explicit gate
    // retain + load/store sequences with resolved byte offsets.
    #[test]
    fn test_field_access_lowering_golden() {
        let const_pool = ConstPool::new();
        let mut field_offsets = std::collections::HashMap::new();
        let field_id = ir_core::FieldId(42);
        field_offsets.insert(field_id, 100);

        let program = Program {
            const_pool,
            modules: vec![ir_core::Module {
                name: "test".to_string(),
                functions: vec![ir_core::Function {
                    id: FunctionId(1),
                    name: "test_fields".to_string(),
                    sig: {
                        let mut i = ir_core::global_signature_interner().lock().unwrap();
                        i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
                    },
                    params: vec![],
                    return_type: ir_core::Type::Void,
                    blocks: vec![Block {
                        id: 0,
                        instrs: vec![
                            Instr::from(InstrKind::GateLoadField { gate: ir_core::ValueId(0), field: field_id }),
                            Instr::from(InstrKind::GateStoreField { gate: ir_core::ValueId(0), field: field_id, value: ir_core::ValueId(1) }),
                        ],
                        terminator: Terminator::Return,
                    }],
                    local_types: HashMap::new(),
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                }],
            }],
            field_offsets,
            field_types: HashMap::new(),
        };

        let vm_module = lower_program(&program).expect("Lowering failed");
        let func = &vm_module.functions[0];

        // Expected VM IR (9 instructions):
        // Label block_0
        // LocalLoad 0 (gate), GateRetain, GateLoad 100   -- field load
        // LocalLoad 0 (gate), GateRetain, LocalLoad 1 (value), GateStore 100 -- field store
        // Ret

        assert_eq!(func.body.len(), 9);
        match &func.body[1].kind {
            VmInstrKind::LocalLoad { slot } => assert_eq!(*slot, 0),
            _ => panic!("Expected LocalLoad 0"),
        }
        match &func.body[2].kind {
            VmInstrKind::GateRetain => (),
            _ => panic!("Expected GateRetain"),
        }
        match &func.body[3].kind {
            VmInstrKind::GateLoad { offset } => assert_eq!(*offset, 100),
            _ => panic!("Expected GateLoad 100"),
        }
        match &func.body[7].kind {
            VmInstrKind::GateStore { offset } => assert_eq!(*offset, 100),
            _ => panic!("Expected GateStore 100"),
        }
        match &func.body[8].kind {
            VmInstrKind::Ret => (),
            _ => panic!("Expected Ret"),
        }
    }

    // Lowering must fail loudly (E_LOWER_UNRESOLVED_OFFSET) when a field
    // has no entry in the program's offset table.
    #[test]
    fn test_missing_field_offset_fails() {
        let program = Program {
            const_pool: ConstPool::new(),
            modules: vec![ir_core::Module {
                name: "test".to_string(),
                functions: vec![ir_core::Function {
                    id: FunctionId(1),
                    name: "fail".to_string(),
                    sig: {
                        let mut i = ir_core::global_signature_interner().lock().unwrap();
                        i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
                    },
                    params: vec![],
                    return_type: ir_core::Type::Void,
                    blocks: vec![Block {
                        id: 0,
                        instrs: vec![
                            // FieldId(999) is deliberately absent from field_offsets.
                            Instr::from(InstrKind::GateLoadField { gate: ir_core::ValueId(0), field: ir_core::FieldId(999) }),
                        ],
                        terminator: Terminator::Return,
                    }],
                    local_types: HashMap::new(),
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                }],
            }],
            field_offsets: std::collections::HashMap::new(),
            field_types: HashMap::new(),
        };

        let result = lower_program(&program);
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("E_LOWER_UNRESOLVED_OFFSET"));
    }

    // Golden test for reference-count tracing: overwriting a gate-holding
    // local must release the old value, and returning must release all
    // still-live gate locals.
    #[test]
    fn test_rc_trace_lowering_golden() {
        let mut const_pool = ConstPool::new();
        const_pool.insert(ConstantValue::Int(0)); // ConstId(0)

        let type_id = ir_core::TypeId(1);

        let program = Program {
            const_pool,
            modules: vec![ir_core::Module {
                name: "test".to_string(),
                functions: vec![ir_core::Function {
                    id: FunctionId(1),
                    name: "main".to_string(),
                    sig: {
                        let mut i = ir_core::global_signature_interner().lock().unwrap();
                        i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
                    },
                    params: vec![],
                    return_type: ir_core::Type::Void,
                    blocks: vec![Block {
                        id: 0,
                        instrs: vec![
                            // 1. allocates a gate
                            Instr::from(InstrKind::Alloc { ty: type_id, slots: 1 }),
                            Instr::from(InstrKind::SetLocal(0)), // x = alloc

                            // 2. copies it
                            Instr::from(InstrKind::GetLocal(0)),
                            Instr::from(InstrKind::SetLocal(1)), // y = x

                            // 3. overwrites one copy
                            Instr::from(InstrKind::PushConst(CoreConstId(0))),
                            Instr::from(InstrKind::SetLocal(0)), // x = 0 (overwrites gate)
                        ],
                        terminator: Terminator::Return,
                    }],
                    local_types: HashMap::new(),
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                }],
            }],
            field_offsets: HashMap::new(),
            field_types: HashMap::new(),
        };

        let vm_module = lower_program(&program).expect("Lowering failed");
        let func = &vm_module.functions[0];

        let kinds: Vec<_> = func.body.iter().map(|i| &i.kind).collect();

        assert!(kinds.contains(&&VmInstrKind::GateRetain));
        assert!(kinds.contains(&&VmInstrKind::GateRelease));

        // Check specific sequence for overwrite:
        // LocalLoad 0, GateRelease, LocalStore 0
        let mut found_overwrite = false;
        for i in 0..kinds.len() - 2 {
            if let (VmInstrKind::LocalLoad { slot: 0 }, VmInstrKind::GateRelease, VmInstrKind::LocalStore { slot: 0 }) = (kinds[i], kinds[i+1], kinds[i+2]) {
                found_overwrite = true;
                break;
            }
        }
        assert!(found_overwrite, "Should have emitted release-then-store sequence for overwrite");

        // Check Ret cleanup:
        // LocalLoad 1, GateRelease, Ret
        let mut found_cleanup = false;
        for i in 0..kinds.len() - 2 {
            if let (VmInstrKind::LocalLoad { slot: 1 }, VmInstrKind::GateRelease, VmInstrKind::Ret) = (kinds[i], kinds[i+1], kinds[i+2]) {
                found_cleanup = true;
                break;
            }
        }
        assert!(found_cleanup, "Should have emitted cleanup for local y at return");
    }

    // A program that never touches gates must not pick up any stray
    // retain/release instructions from the lowering pass.
    #[test]
    fn test_no_silent_rc() {
        let mut const_pool = ConstPool::new();
        const_pool.insert(ConstantValue::Int(42));

        let program = Program {
            const_pool,
            modules: vec![ir_core::Module {
                name: "test".to_string(),
                functions: vec![ir_core::Function {
                    id: FunctionId(1),
                    name: "main".to_string(),
                    sig: {
                        let mut i = ir_core::global_signature_interner().lock().unwrap();
                        i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
                    },
                    params: vec![],
                    return_type: ir_core::Type::Void,
                    blocks: vec![Block {
                        id: 0,
                        instrs: vec![
                            Instr::from(InstrKind::PushConst(CoreConstId(0))),
                            Instr::from(InstrKind::SetLocal(0)), // x = 42
                            Instr::from(InstrKind::GetLocal(0)),
                            Instr::from(InstrKind::Pop),
                        ],
                        terminator: Terminator::Return,
                    }],
                    local_types: HashMap::new(),
                    param_slots: 0,
                    local_slots: 0,
                    return_slots: 0,
                }],
            }],
            field_offsets: HashMap::new(),
            field_types: HashMap::new(),
        };

        let vm_module = lower_program(&program).expect("Lowering failed");
        let func = &vm_module.functions[0];

        for instr in &func.body {
            match &instr.kind {
                VmInstrKind::GateRetain | VmInstrKind::GateRelease => {
                    panic!("Non-gate program should not contain RC instructions: {:?}", instr);
                }
                _ => {}
            }
        }
    }

    #[test]
    fn test_no_implicit_offsets_in_vm_ir() {
        // This test ensures that GateLoad and GateStore in VM IR always have explicit offsets.
        // Since we are using struct variants with mandatory 'offset' field, this is
        // enforced by the type system, but we can also check the serialized form.
        let instructions = vec![
            VmInstrKind::GateLoad { offset: 123 },
            VmInstrKind::GateStore { offset: 456 },
        ];
        let json = serde_json::to_string(&instructions).unwrap();
        assert!(json.contains("\"GateLoad\":{\"offset\":123}"));
        assert!(json.contains("\"GateStore\":{\"offset\":456}"));
    }
}
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
// Lowering passes from the core IR to the VM instruction set.
pub mod core_to_vm;

// Re-export the primary entry point so callers can write `lowering::lower_program`.
pub use core_to_vm::lower_program;
|
|
||||||
@ -1,404 +0,0 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::BTreeMap;
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
/// The category of project a manifest describes.
/// Serialized in lowercase ("app", "lib", "system") per the serde attribute.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ManifestKind {
    App,
    Lib,
    System,
}
|
|
||||||
|
|
||||||
impl Default for ManifestKind {
    /// Manifests that omit `kind` are treated as applications
    /// (see the `#[serde(default)]` on `Manifest::kind`).
    fn default() -> Self {
        Self::App
    }
}
|
|
||||||
|
|
||||||
/// The local name under which a project refers to one of its dependencies.
pub type Alias = String;
|
|
||||||
|
|
||||||
/// How a dependency is written in the manifest: either a bare path string
/// or a full object form. `#[serde(untagged)]` lets both JSON shapes
/// deserialize into this enum.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum DependencySpec {
    /// Shorthand: `"alias": "../relative/path"`.
    Path(String),
    /// Long form: `"alias": { "path": ..., "git": ..., "version": ... }`.
    Full(FullDependencySpec),
}
|
|
||||||
|
|
||||||
/// Long-form dependency source. Validation requires exactly one of `path`
/// or `git` to be present (enforced in `validate_manifest`, not here).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct FullDependencySpec {
    pub path: Option<String>,
    pub git: Option<String>,
    pub version: Option<String>,
}
|
|
||||||
|
|
||||||
/// Parsed contents of a project's `prometeu.json` manifest.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct Manifest {
    pub name: String,
    pub version: String,
    // Defaults to ManifestKind::App when the field is absent.
    #[serde(default)]
    pub kind: ManifestKind,
    // BTreeMap keeps dependency iteration order deterministic.
    #[serde(default)]
    pub dependencies: BTreeMap<Alias, DependencySpec>,
}
|
|
||||||
|
|
||||||
/// Errors produced while loading or validating a manifest.
#[derive(Debug)]
pub enum ManifestError {
    /// The manifest file could not be read.
    Io(std::io::Error),
    /// The file was read but did not deserialize as a `Manifest`.
    Json {
        path: PathBuf,
        error: serde_json::Error,
    },
    /// The JSON parsed but violates a semantic rule.
    Validation {
        path: PathBuf,
        message: String,
        /// Best-effort JSON pointer to the offending element.
        pointer: Option<String>,
    },
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for ManifestError {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
ManifestError::Io(e) => write!(f, "IO error: {}", e),
|
|
||||||
ManifestError::Json { path, error } => {
|
|
||||||
write!(f, "JSON error in {}: {}", path.display(), error)
|
|
||||||
}
|
|
||||||
ManifestError::Validation { path, message, pointer } => {
|
|
||||||
write!(f, "Validation error in {}: {}", path.display(), message)?;
|
|
||||||
if let Some(p) = pointer {
|
|
||||||
write!(f, " (at {})", p)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Marker impl: the default `source()` (None) is sufficient here.
impl std::error::Error for ManifestError {}
|
|
||||||
|
|
||||||
pub fn load_manifest(project_root: &Path) -> Result<Manifest, ManifestError> {
|
|
||||||
let manifest_path = project_root.join("prometeu.json");
|
|
||||||
let content = fs::read_to_string(&manifest_path).map_err(ManifestError::Io)?;
|
|
||||||
let manifest: Manifest = serde_json::from_str(&content).map_err(|e| ManifestError::Json {
|
|
||||||
path: manifest_path.clone(),
|
|
||||||
error: e,
|
|
||||||
})?;
|
|
||||||
|
|
||||||
validate_manifest(&manifest, &manifest_path)?;
|
|
||||||
|
|
||||||
Ok(manifest)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn validate_manifest(manifest: &Manifest, path: &Path) -> Result<(), ManifestError> {
|
|
||||||
// Validate name
|
|
||||||
if manifest.name.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: "Project name cannot be empty".into(),
|
|
||||||
pointer: Some("/name".into()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if manifest.name.chars().any(|c| c.is_whitespace()) {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: "Project name cannot contain whitespace".into(),
|
|
||||||
pointer: Some("/name".into()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate version (basic check, could be more thorough if we want to enforce semver now)
|
|
||||||
if manifest.version.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: "Project version cannot be empty".into(),
|
|
||||||
pointer: Some("/version".into()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate dependencies
|
|
||||||
for (alias, spec) in &manifest.dependencies {
|
|
||||||
if alias.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: "Dependency alias cannot be empty".into(),
|
|
||||||
pointer: Some("/dependencies".into()), // Best effort pointer
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if alias.chars().any(|c| c.is_whitespace()) {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Dependency alias '{}' cannot contain whitespace", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
match spec {
|
|
||||||
DependencySpec::Path(p) => {
|
|
||||||
if p.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Path for dependency '{}' cannot be empty", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
DependencySpec::Full(full) => {
|
|
||||||
match (full.path.as_ref(), full.git.as_ref()) {
|
|
||||||
(Some(_), Some(_)) => {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Dependency '{}' must specify exactly one source (path or git), but both were found", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
(None, None) => {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Dependency '{}' must specify exactly one source (path or git), but none were found", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
(Some(p), None) => {
|
|
||||||
if p.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Path for dependency '{}' cannot be empty", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(None, Some(g)) => {
|
|
||||||
if g.trim().is_empty() {
|
|
||||||
return Err(ManifestError::Validation {
|
|
||||||
path: path.to_path_buf(),
|
|
||||||
message: format!("Git URL for dependency '{}' cannot be empty", alias),
|
|
||||||
pointer: Some(format!("/dependencies/{}", alias)),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    // A manifest with only the required fields gets default kind and
    // empty dependencies.
    #[test]
    fn test_parse_minimal_manifest() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "my_project",
                "version": "0.1.0"
            }"#,
        )
        .unwrap();

        let manifest = load_manifest(dir.path()).unwrap();
        assert_eq!(manifest.name, "my_project");
        assert_eq!(manifest.version, "0.1.0");
        assert_eq!(manifest.kind, ManifestKind::App);
        assert!(manifest.dependencies.is_empty());
    }

    // Both dependency shapes (shorthand path string and full git object)
    // must round-trip through deserialization.
    #[test]
    fn test_parse_full_manifest() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "full_project",
                "version": "1.2.3",
                "kind": "lib",
                "dependencies": {
                    "std": "../std",
                    "core": {
                        "git": "https://github.com/prometeu/core",
                        "version": "v1.0"
                    }
                }
            }"#,
        )
        .unwrap();

        let manifest = load_manifest(dir.path()).unwrap();
        assert_eq!(manifest.name, "full_project");
        assert_eq!(manifest.version, "1.2.3");
        assert_eq!(manifest.kind, ManifestKind::Lib);
        assert_eq!(manifest.dependencies.len(), 2);

        match manifest.dependencies.get("std").unwrap() {
            DependencySpec::Path(p) => assert_eq!(p, "../std"),
            _ => panic!("Expected path dependency"),
        }

        match manifest.dependencies.get("core").unwrap() {
            DependencySpec::Full(full) => {
                assert_eq!(full.git.as_ref().unwrap(), "https://github.com/prometeu/core");
                assert_eq!(full.version.as_ref().unwrap(), "v1.0");
                assert!(full.path.is_none());
            }
            _ => panic!("Expected full dependency"),
        }
    }

    // "name" has no serde default, so omitting it surfaces as a JSON
    // deserialization error rather than a validation error.
    #[test]
    fn test_missing_name_error() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "version": "0.1.0"
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Json { .. }) => {}
            _ => panic!("Expected JSON error due to missing name, got {:?}", result),
        }
    }

    // Whitespace in the project name is rejected with a pointer to /name.
    #[test]
    fn test_invalid_name_error() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "my project",
                "version": "0.1.0"
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Validation { message, pointer, .. }) => {
                assert!(message.contains("whitespace"));
                assert_eq!(pointer.unwrap(), "/name");
            }
            _ => panic!("Expected validation error due to invalid name, got {:?}", result),
        }
    }

    // A dependency may not name both a path and a git source.
    #[test]
    fn test_invalid_dependency_shape_both_sources() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "test",
                "version": "0.1.0",
                "dependencies": {
                    "bad": {
                        "path": "./here",
                        "git": "https://there"
                    }
                }
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Validation { message, pointer, .. }) => {
                assert!(message.contains("exactly one source"));
                assert_eq!(pointer.unwrap(), "/dependencies/bad");
            }
            _ => panic!("Expected validation error due to both sources, got {:?}", result),
        }
    }

    // A full-form dependency with neither path nor git is rejected.
    #[test]
    fn test_invalid_dependency_shape_no_source() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "test",
                "version": "0.1.0",
                "dependencies": {
                    "bad": {
                        "version": "1.0.0"
                    }
                }
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Validation { message, pointer, .. }) => {
                assert!(message.contains("exactly one source"));
                assert_eq!(pointer.unwrap(), "/dependencies/bad");
            }
            _ => panic!("Expected validation error due to no source, got {:?}", result),
        }
    }

    // Shorthand path dependencies must be non-empty strings.
    #[test]
    fn test_invalid_dependency_empty_path() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "test",
                "version": "0.1.0",
                "dependencies": {
                    "empty": ""
                }
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Validation { message, .. }) => {
                assert!(message.contains("cannot be empty"));
            }
            _ => panic!("Expected validation error due to empty path, got {:?}", result),
        }
    }

    // Dependency aliases may not contain whitespace.
    #[test]
    fn test_invalid_dependency_alias_whitespace() {
        let dir = tempdir().unwrap();
        let manifest_path = dir.path().join("prometeu.json");
        fs::write(
            &manifest_path,
            r#"{
                "name": "test",
                "version": "0.1.0",
                "dependencies": {
                    "bad alias": "../std"
                }
            }"#,
        )
        .unwrap();

        let result = load_manifest(dir.path());
        match result {
            Err(ManifestError::Validation { message, .. }) => {
                assert!(message.contains("whitespace"));
            }
            _ => panic!("Expected validation error due to whitespace in alias, got {:?}", result),
        }
    }
}
|
|
||||||
@ -1,44 +0,0 @@
|
|||||||
use crate::frontends::pbs::symbols::{SymbolKind, Visibility};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
|
||||||
pub enum ExportSurfaceKind {
|
|
||||||
Service,
|
|
||||||
DeclareType, // struct, storage struct, type alias
|
|
||||||
Function, // funções públicas (ex.: métodos de service expostos pelo SDK)
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExportSurfaceKind {
|
|
||||||
pub fn from_symbol_kind(kind: SymbolKind) -> Option<Self> {
|
|
||||||
match kind {
|
|
||||||
SymbolKind::Service => Some(ExportSurfaceKind::Service),
|
|
||||||
SymbolKind::Struct | SymbolKind::Contract | SymbolKind::ErrorType => {
|
|
||||||
Some(ExportSurfaceKind::DeclareType)
|
|
||||||
}
|
|
||||||
// Em v0, permitimos exportar funções públicas — usado sobretudo para métodos de `service`
|
|
||||||
SymbolKind::Function => Some(ExportSurfaceKind::Function),
|
|
||||||
SymbolKind::Local => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn validate_visibility(kind: SymbolKind, vis: Visibility) -> Result<(), String> {
|
|
||||||
if vis == Visibility::Pub {
|
|
||||||
if Self::from_symbol_kind(kind).is_none() {
|
|
||||||
let kind_str = match kind {
|
|
||||||
SymbolKind::Function => "Functions",
|
|
||||||
_ => "This declaration",
|
|
||||||
};
|
|
||||||
return Err(format!("{} are not exportable in this version.", kind_str));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn namespace(&self) -> crate::frontends::pbs::symbols::Namespace {
|
|
||||||
match self {
|
|
||||||
ExportSurfaceKind::Service => crate::frontends::pbs::symbols::Namespace::Type,
|
|
||||||
ExportSurfaceKind::DeclareType => crate::frontends::pbs::symbols::Namespace::Type,
|
|
||||||
ExportSurfaceKind::Function => crate::frontends::pbs::symbols::Namespace::Value,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1 +0,0 @@
|
|||||||
pub mod export_surface;
|
|
||||||
@ -1,216 +0,0 @@
|
|||||||
use crate::common::diagnostics::DiagnosticBundle;
|
|
||||||
use crate::manifest::{load_manifest, ManifestKind};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
|
||||||
pub struct ProjectSources {
|
|
||||||
pub main: Option<PathBuf>,
|
|
||||||
pub files: Vec<PathBuf>,
|
|
||||||
pub test_files: Vec<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum SourceError {
|
|
||||||
Io(std::io::Error),
|
|
||||||
Manifest(crate::manifest::ManifestError),
|
|
||||||
MissingMain(PathBuf),
|
|
||||||
Diagnostics(DiagnosticBundle),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for SourceError {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
SourceError::Io(e) => write!(f, "IO error: {}", e),
|
|
||||||
SourceError::Manifest(e) => write!(f, "Manifest error: {}", e),
|
|
||||||
SourceError::MissingMain(path) => write!(f, "Missing entry point: {}", path.display()),
|
|
||||||
SourceError::Diagnostics(d) => write!(f, "Source diagnostics: {:?}", d),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::error::Error for SourceError {}
|
|
||||||
|
|
||||||
impl From<std::io::Error> for SourceError {
|
|
||||||
fn from(e: std::io::Error) -> Self {
|
|
||||||
SourceError::Io(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<crate::manifest::ManifestError> for SourceError {
|
|
||||||
fn from(e: crate::manifest::ManifestError) -> Self {
|
|
||||||
SourceError::Manifest(e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<DiagnosticBundle> for SourceError {
|
|
||||||
fn from(d: DiagnosticBundle) -> Self {
|
|
||||||
SourceError::Diagnostics(d)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE: Export surface discovery is a Frontend responsibility now.
|
|
||||||
// This module is intentionally discovery-only (file listing + main rules).
|
|
||||||
|
|
||||||
pub fn discover(project_dir: &Path) -> Result<ProjectSources, SourceError> {
|
|
||||||
let project_dir = project_dir.canonicalize()?;
|
|
||||||
let manifest = load_manifest(&project_dir)?;
|
|
||||||
|
|
||||||
let main_modules_dir = project_dir.join("src/main/modules");
|
|
||||||
let test_modules_dir = project_dir.join("src/test/modules");
|
|
||||||
|
|
||||||
let mut production_files = Vec::new();
|
|
||||||
if main_modules_dir.exists() && main_modules_dir.is_dir() {
|
|
||||||
discover_recursive(&main_modules_dir, &mut production_files)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut test_files = Vec::new();
|
|
||||||
if test_modules_dir.exists() && test_modules_dir.is_dir() {
|
|
||||||
discover_recursive(&test_modules_dir, &mut test_files)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort files for determinism
|
|
||||||
production_files.sort();
|
|
||||||
test_files.sort();
|
|
||||||
|
|
||||||
// Recommended main: src/main/modules/main.pbs
|
|
||||||
let main_path = main_modules_dir.join("main.pbs");
|
|
||||||
let has_main = production_files.iter().any(|p| p == &main_path);
|
|
||||||
|
|
||||||
let main = if has_main { Some(main_path) } else { None };
|
|
||||||
|
|
||||||
if manifest.kind == ManifestKind::App && main.is_none() {
|
|
||||||
return Err(SourceError::MissingMain(main_modules_dir.join("main.pbs")));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(ProjectSources {
|
|
||||||
main,
|
|
||||||
files: production_files,
|
|
||||||
test_files,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Recursively collects every file with a `.pbs` extension under `dir`
/// into `files`, walking subdirectories depth-first. Non-`.pbs` entries
/// are ignored.
fn discover_recursive(dir: &Path, files: &mut Vec<PathBuf>) -> std::io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            discover_recursive(&path, files)?;
        } else if path.extension().and_then(|ext| ext.to_str()) == Some("pbs") {
            files.push(path);
        }
    }
    Ok(())
}
|
|
||||||
|
|
||||||
// build_exports removed: export collection belongs to Frontend (frontend-api contract).
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    const APP_MANIFEST: &str = r#"{
        "name": "app",
        "version": "0.1.0",
        "kind": "app"
    }"#;

    const LIB_MANIFEST: &str = r#"{
        "name": "lib",
        "version": "0.1.0",
        "kind": "lib"
    }"#;

    /// Creates a temp project, writes `manifest` as its `prometeu.json`, and
    /// returns the tempdir guard plus the canonicalized project root.
    fn setup(manifest: &str) -> (tempfile::TempDir, PathBuf) {
        let dir = tempdir().unwrap();
        let project_dir = dir.path().canonicalize().unwrap();
        fs::write(project_dir.join("prometeu.json"), manifest).unwrap();
        (dir, project_dir)
    }

    /// Creates an empty file at `rel` (parents included) and returns its path.
    fn touch(project_dir: &Path, rel: &str) -> PathBuf {
        let path = project_dir.join(rel);
        fs::create_dir_all(path.parent().unwrap()).unwrap();
        fs::write(&path, "").unwrap();
        path
    }

    #[test]
    fn test_discover_app_with_main() {
        let (_guard, project_dir) = setup(APP_MANIFEST);
        let main_pbs = touch(&project_dir, "src/main/modules/main.pbs");
        touch(&project_dir, "src/main/modules/other.pbs");

        let sources = discover(&project_dir).unwrap();
        assert_eq!(sources.main, Some(main_pbs));
        assert_eq!(sources.files.len(), 2);
    }

    #[test]
    fn test_discover_app_missing_main() {
        let (_guard, project_dir) = setup(APP_MANIFEST);
        touch(&project_dir, "src/main/modules/not_main.pbs");

        let result = discover(&project_dir);
        assert!(matches!(result, Err(SourceError::MissingMain(_))));
    }

    #[test]
    fn test_discover_lib_without_main() {
        let (_guard, project_dir) = setup(LIB_MANIFEST);
        let lib_pbs = touch(&project_dir, "src/main/modules/lib.pbs");

        let sources = discover(&project_dir).unwrap();
        assert_eq!(sources.main, None);
        assert_eq!(sources.files, vec![lib_pbs]);
    }

    #[test]
    fn test_discover_recursive() {
        let (_guard, project_dir) = setup(LIB_MANIFEST);
        let main_pbs = touch(&project_dir, "src/main/modules/main.pbs");
        let util_pbs = touch(&project_dir, "src/main/modules/utils/util.pbs");

        let sources = discover(&project_dir).unwrap();
        assert_eq!(sources.files.len(), 2);
        assert!(sources.files.contains(&main_pbs));
        assert!(sources.files.contains(&util_pbs));
    }

    // No export-surface tests here; handled by Frontend implementations.
}
|
|
||||||
@ -1,46 +0,0 @@
|
|||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
|
|
||||||
/// Recursively gathers every `*.rs` file under `dir` into `out`.
/// Unreadable directories are skipped silently (best effort).
fn collect_rs_files(dir: &Path, out: &mut Vec<PathBuf>) {
    let Ok(entries) = fs::read_dir(dir) else { return };
    for entry in entries.flatten() {
        let path = entry.path();
        if path.is_dir() {
            collect_rs_files(&path, out);
        } else if path.extension().and_then(|s| s.to_str()) == Some("rs") {
            out.push(path);
        }
    }
}
|
|
||||||
|
|
||||||
/// Architectural guard: backend code must stay frontend-agnostic, so no
/// file under `src/backend` may reference the PBS frontend modules.
#[test]
fn backend_must_not_import_pbs() {
    let crate_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    let backend_dir = crate_root.join("src").join("backend");
    // Nothing to check when this crate has no backend directory.
    if !backend_dir.exists() {
        return;
    }

    let mut files = Vec::new();
    collect_rs_files(&backend_dir, &mut files);

    let offenders: Vec<PathBuf> = files
        .into_iter()
        .filter(|f| match fs::read_to_string(f) {
            Ok(src) => src.contains("frontends::pbs") || src.contains("crate::frontends::pbs"),
            Err(_) => false,
        })
        .collect();

    if !offenders.is_empty() {
        let list = offenders
            .iter()
            .map(|p| p.strip_prefix(&crate_root).unwrap_or(p).display().to_string())
            .collect::<Vec<_>>()
            .join("\n - ");
        panic!(
            "Backend must not import PBS modules (frontends::pbs). Offending files:\n - {}",
            list
        );
    }
}
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
use prometeu_analysis::{ids::FileId, span::Span};
|
|
||||||
use prometeu_compiler::common::diagnostics::{Diagnostic, DiagnosticBundle, Severity};
|
|
||||||
|
|
||||||
/// A diagnostic whose span covers the whole fixture keeps the span
/// invariants (end >= start, end <= file length) and serializes cleanly.
#[test]
fn diagnostic_span_is_valid_for_file() {
    // Simple fixture.
    let text = "let x = 10;"; // len = 11
    let file = FileId(1);

    // Build a valid span: exclusive end, bounded by the text length.
    let span = Span::new(file, 0, text.len() as u32);
    assert!(span.end >= span.start);
    assert!(span.end <= text.len() as u32);

    let diag = Diagnostic {
        severity: Severity::Error,
        code: "E_TEST".to_string(),
        message: "testing".to_string(),
        span,
        related: vec![],
    };
    let bundle = DiagnosticBundle { diagnostics: vec![diag] };

    // Serializing proves the span goes through the pipeline without panics.
    let _json = serde_json::to_string(&bundle).expect("must serialize diagnostics");
}
|
|
||||||
@ -1,292 +0,0 @@
|
|||||||
use prometeu_compiler::building::output::CompiledModule;
|
|
||||||
use prometeu_compiler::building::output::{compile_project, CompileError, ExportKey, ExportMetadata};
|
|
||||||
use prometeu_compiler::building::plan::{BuildStep, BuildTarget};
|
|
||||||
use prometeu_compiler::common::files::FileManager;
|
|
||||||
use prometeu_compiler::deps::resolver::ProjectKey;
|
|
||||||
use language_api::types::{ExportItem, ItemName};
|
|
||||||
use prometeu_compiler::frontends::pbs::adapter::PbsFrontendAdapter;
|
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
use std::collections::{BTreeMap, HashMap};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use tempfile::tempdir;
|
|
||||||
|
|
||||||
use std::fs;
|
|
||||||
|
|
||||||
/// A locally-defined module path must not collide with an export that a
/// dependency mounted under the same alias already provides.
#[test]
fn test_local_vs_dependency_conflict() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().to_path_buf();

    // Dependency "sdk" exporting type `Vector` from module "math".
    let mut dep_exports = BTreeMap::new();
    dep_exports.insert(
        ExportKey {
            module_path: "math".to_string(), // normalized path
            item: ExportItem::Type { name: ItemName::new("Vector").unwrap() },
        },
        ExportMetadata { func_idx: None, is_host: false, ty: None },
    );
    let dep_module = CompiledModule {
        project_id: ProjectId(0),
        project_key: ProjectKey { name: "sdk-impl".to_string(), version: "1.0.0".to_string() },
        target: BuildTarget::Main,
        exports: dep_exports,
        imports: vec![],
        const_pool: vec![],
        code: vec![],
        function_metas: vec![],
        debug_info: None,
        symbols: vec![],
    };
    let mut dep_modules: HashMap<ProjectId, CompiledModule> = HashMap::new();
    dep_modules.insert(ProjectId(0), dep_module);

    // The main project declares a LOCAL module "sdk/math" with its own Vector,
    // which clashes with the aliased dependency export above.
    fs::create_dir_all(project_dir.join("src/main/modules/sdk/math")).unwrap();
    fs::write(project_dir.join("src/main/modules/sdk/math/local.pbs"), "pub declare struct Vector(x: int)").unwrap();

    let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
    deps.insert("sdk".to_string(), ProjectId(0));

    let step = BuildStep {
        project_id: ProjectId(1),
        project_key: ProjectKey { name: "main".to_string(), version: "0.1.0".to_string() },
        project_dir,
        target: BuildTarget::Main,
        sources: vec![PathBuf::from("src/main/modules/sdk/math/local.pbs")],
        deps,
    };

    let mut file_manager = FileManager::new();
    let result = compile_project(step, &dep_modules, &PbsFrontendAdapter, &mut file_manager);
    match result {
        Err(CompileError::DuplicateExport { symbol, .. }) => assert_eq!(symbol, "Vector"),
        _ => panic!("Expected DuplicateExport error, got {:?}", result),
    }
}
|
|
||||||
|
|
||||||
/// Two dependencies mounted at overlapping aliases ("a" and "a/b") whose
/// exports resolve to the same effective module path must conflict.
#[test]
fn test_aliased_dependency_conflict() {
    // Builds a dependency module exporting type `Vector` from `module_path`.
    fn dep(project_id: ProjectId, name: &str, module_path: &str) -> CompiledModule {
        let mut exports = BTreeMap::new();
        exports.insert(
            ExportKey {
                module_path: module_path.to_string(),
                item: ExportItem::Type { name: ItemName::new("Vector").unwrap() },
            },
            ExportMetadata { func_idx: None, is_host: false, ty: None },
        );
        CompiledModule {
            project_id,
            project_key: ProjectKey { name: name.to_string(), version: "1.0.0".to_string() },
            target: BuildTarget::Main,
            exports,
            imports: vec![],
            const_pool: vec![],
            code: vec![],
            function_metas: vec![],
            debug_info: None,
            symbols: vec![],
        }
    }

    // p1 exports "b/c:Vector" (mounted at "a"); p2 exports "c:Vector"
    // (mounted at "a/b") — both resolve to "a/b/c:Vector".
    let mut dep_modules: HashMap<ProjectId, CompiledModule> = HashMap::new();
    dep_modules.insert(ProjectId(0), dep(ProjectId(0), "p1", "b/c"));
    dep_modules.insert(ProjectId(1), dep(ProjectId(1), "p2", "c"));

    let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
    deps.insert("a".to_string(), ProjectId(0));
    deps.insert("a/b".to_string(), ProjectId(1));

    let dir = tempdir().unwrap();
    let step = BuildStep {
        project_id: ProjectId(2),
        project_key: ProjectKey { name: "main".to_string(), version: "0.1.0".to_string() },
        project_dir: dir.path().to_path_buf(),
        target: BuildTarget::Main,
        sources: vec![],
        deps,
    };

    let mut file_manager = FileManager::new();
    let result = compile_project(step, &dep_modules, &PbsFrontendAdapter, &mut file_manager);
    match result {
        Err(CompileError::DuplicateExport { symbol, .. }) => assert_eq!(symbol, "Vector"),
        _ => panic!("Expected DuplicateExport error, got {:?}", result),
    }
}
|
|
||||||
|
|
||||||
/// Main and test modules compile into one unit; both export surfaces end
/// up under their normalized module paths.
#[test]
fn test_mixed_main_test_modules() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().to_path_buf();

    for (rel, src) in [
        ("src/main/modules/math/Vector.pbs", "pub declare struct Vector(x: int)"),
        ("src/test/modules/foo/Test.pbs", "pub declare struct Test(x: int)"),
    ] {
        let path = project_dir.join(rel);
        fs::create_dir_all(path.parent().unwrap()).unwrap();
        fs::write(path, src).unwrap();
    }

    let step = BuildStep {
        project_id: ProjectId(0),
        project_key: ProjectKey { name: "mixed".to_string(), version: "0.1.0".to_string() },
        project_dir,
        target: BuildTarget::Main,
        sources: vec![
            PathBuf::from("src/main/modules/math/Vector.pbs"),
            PathBuf::from("src/test/modules/foo/Test.pbs"),
        ],
        deps: BTreeMap::new(),
    };

    let mut file_manager = FileManager::new();
    let compiled = compile_project(step, &HashMap::new(), &PbsFrontendAdapter, &mut file_manager).unwrap();

    // Both export surfaces must be present under their normalized paths.
    assert!(compiled.exports.keys().any(|k| k.module_path == "math"));
    assert!(compiled.exports.keys().any(|k| k.module_path == "foo"));
}
|
|
||||||
|
|
||||||
/// Two source files in the same directory merge into one module ("gfx"),
/// contributing their exports side by side.
#[test]
fn test_module_merging_same_directory() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().to_path_buf();

    fs::create_dir_all(project_dir.join("src/main/modules/gfx")).unwrap();
    fs::write(project_dir.join("src/main/modules/gfx/api.pbs"), "pub declare struct Gfx(id: int)").unwrap();
    fs::write(project_dir.join("src/main/modules/gfx/colors.pbs"), "pub declare struct Color(r: int)").unwrap();

    let step = BuildStep {
        project_id: ProjectId(0),
        project_key: ProjectKey { name: "merge".to_string(), version: "0.1.0".to_string() },
        project_dir,
        target: BuildTarget::Main,
        sources: vec![
            PathBuf::from("src/main/modules/gfx/api.pbs"),
            PathBuf::from("src/main/modules/gfx/colors.pbs"),
        ],
        deps: BTreeMap::new(),
    };

    let mut file_manager = FileManager::new();
    let compiled = compile_project(step, &HashMap::new(), &PbsFrontendAdapter, &mut file_manager).unwrap();

    // Both types must land in the same merged module "gfx".
    let has_type = |module: &str, ty: &str| {
        compiled.exports.keys().any(|k| {
            k.module_path == module
                && matches!(&k.item, ExportItem::Type { name } if name.as_str() == ty)
        })
    };
    assert!(has_type("gfx", "Gfx"));
    assert!(has_type("gfx", "Color"));
}
|
|
||||||
|
|
||||||
/// The same symbol declared in two files of one module is rejected as a
/// frontend (duplicate symbol) error.
#[test]
fn test_duplicate_symbol_in_same_module_different_files() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().to_path_buf();

    fs::create_dir_all(project_dir.join("src/main/modules/gfx")).unwrap();
    for file in ["a.pbs", "b.pbs"] {
        fs::write(
            project_dir.join("src/main/modules/gfx").join(file),
            "pub declare struct Gfx(id: int)",
        )
        .unwrap();
    }

    let step = BuildStep {
        project_id: ProjectId(0),
        project_key: ProjectKey { name: "dup".to_string(), version: "0.1.0".to_string() },
        project_dir,
        target: BuildTarget::Main,
        sources: vec![
            PathBuf::from("src/main/modules/gfx/a.pbs"),
            PathBuf::from("src/main/modules/gfx/b.pbs"),
        ],
        deps: BTreeMap::new(),
    };

    let mut file_manager = FileManager::new();
    let result = compile_project(step, &HashMap::new(), &PbsFrontendAdapter, &mut file_manager);
    // Should be a frontend error (duplicate symbol).
    assert!(result.is_err());
}
|
|
||||||
|
|
||||||
/// Files directly under `src/main/modules` merge into the root module
/// (empty module path "").
#[test]
fn test_root_module_merging() {
    let dir = tempdir().unwrap();
    let project_dir = dir.path().to_path_buf();

    fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
    fs::write(project_dir.join("src/main/modules/main.pbs"), "pub declare struct Main(id: int)").unwrap();
    fs::write(project_dir.join("src/main/modules/utils.pbs"), "pub declare struct Utils(id: int)").unwrap();

    let step = BuildStep {
        project_id: ProjectId(0),
        project_key: ProjectKey { name: "root-merge".to_string(), version: "0.1.0".to_string() },
        project_dir,
        target: BuildTarget::Main,
        sources: vec![
            PathBuf::from("src/main/modules/main.pbs"),
            PathBuf::from("src/main/modules/utils.pbs"),
        ],
        deps: BTreeMap::new(),
    };

    let mut file_manager = FileManager::new();
    let compiled = compile_project(step, &HashMap::new(), &PbsFrontendAdapter, &mut file_manager).unwrap();

    // Both exports must be attached to the root module "".
    let in_root = |ty: &str| {
        compiled.exports.keys().any(|k| {
            k.module_path == ""
                && matches!(&k.item, ExportItem::Type { name } if name.as_str() == ty)
        })
    };
    assert!(in_root("Main"));
    assert!(in_root("Utils"));
}
|
|
||||||
@ -1,67 +0,0 @@
|
|||||||
// use prometeu_bytecode::disasm::disasm;
|
|
||||||
// use prometeu_bytecode::BytecodeLoader;
|
|
||||||
// use prometeu_compiler::compiler::compile;
|
|
||||||
// use prometeu_compiler::frontends::pbs::parser::Parser;
|
|
||||||
// use prometeu_compiler::common::spans::FileId;
|
|
||||||
// use prometeu_analysis::NameInterner;
|
|
||||||
// use std::fs;
|
|
||||||
// use std::path::Path;
|
|
||||||
//
|
|
||||||
// #[test]
|
|
||||||
// fn generate_canonical_goldens() {
|
|
||||||
// println!("CWD: {:?}", std::env::current_dir().unwrap());
|
|
||||||
// let project_dir = Path::new("../../test-cartridges/canonical");
|
|
||||||
// if !project_dir.exists() {
|
|
||||||
// // Fallback for when running from project root (some IDEs/environments)
|
|
||||||
// let project_dir = Path::new("../../../../test-cartridges/canonical");
|
|
||||||
// if !project_dir.exists() {
|
|
||||||
// panic!("Could not find project directory at ../../test-cartridges/canonical or test-cartridges/canonical");
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// // We need a stable path for the actual compilation which might use relative paths internally
|
|
||||||
// let project_dir = if Path::new("../../test-cartridges/canonical").exists() {
|
|
||||||
// Path::new("../../test-cartridges/canonical")
|
|
||||||
// } else {
|
|
||||||
// Path::new("../../../../test-cartridges/canonical")
|
|
||||||
// };
|
|
||||||
//
|
|
||||||
// let unit = compile(project_dir).map_err(|e| {
|
|
||||||
// println!("Compilation Error: {}", e);
|
|
||||||
// e
|
|
||||||
// }).expect("Failed to compile canonical cartridge");
|
|
||||||
//
|
|
||||||
// let golden_dir = project_dir.join("golden");
|
|
||||||
// fs::create_dir_all(&golden_dir).unwrap();
|
|
||||||
//
|
|
||||||
// // 1. Bytecode (.pbc)
|
|
||||||
// fs::write(golden_dir.join("program.pbc"), &unit.rom).unwrap();
|
|
||||||
//
|
|
||||||
// // 2. Disassembly
|
|
||||||
// let module = BytecodeLoader::load(&unit.rom).expect("Failed to load BytecodeModule");
|
|
||||||
// let instrs = disasm(&module.code).expect("Failed to disassemble");
|
|
||||||
// let mut disasm_text = String::new();
|
|
||||||
// for instr in instrs {
|
|
||||||
// let operands_str = instr.operands.iter()
|
|
||||||
// .map(|o| format!("{:?}", o))
|
|
||||||
// .collect::<Vec<_>>()
|
|
||||||
// .join(" ");
|
|
||||||
// let line = if operands_str.is_empty() {
|
|
||||||
// format!("{:04X} {:?}\n", instr.pc, instr.opcode)
|
|
||||||
// } else {
|
|
||||||
// format!("{:04X} {:?} {}\n", instr.pc, instr.opcode, operands_str.trim())
|
|
||||||
// };
|
|
||||||
// disasm_text.push_str(&line);
|
|
||||||
// }
|
|
||||||
// fs::write(golden_dir.join("program.disasm.txt"), disasm_text).unwrap();
|
|
||||||
//
|
|
||||||
// // 3. AST JSON
|
|
||||||
// let source = fs::read_to_string(project_dir.join("src/main/modules/main.pbs")).unwrap();
|
|
||||||
// let mut interner = NameInterner::new();
|
|
||||||
// let mut parser = Parser::new(&source, FileId(0), &mut interner);
|
|
||||||
// let parsed = parser.parse_file().expect("Failed to parse AST");
|
|
||||||
// let ast_json = serde_json::to_string_pretty(parsed.arena.kind(parsed.root)).unwrap();
|
|
||||||
// fs::write(golden_dir.join("ast.json"), ast_json).unwrap();
|
|
||||||
//
|
|
||||||
// println!("Golden artifacts generated in test-cartridges/canonical/golden/");
|
|
||||||
// }
|
|
||||||
@ -1,107 +0,0 @@
|
|||||||
use prometeu_compiler::backend::emit_bytecode::emit_module;
|
|
||||||
use prometeu_compiler::ir_core::ids::{ConstId as CoreConstId, FieldId, FunctionId, TypeId as CoreTypeId, ValueId};
|
|
||||||
use prometeu_compiler::ir_core::{self, Block, ConstPool, ConstantValue, Instr, InstrKind as CoreInstrKind, Program, Terminator};
|
|
||||||
use prometeu_compiler::ir_lang::InstrKind;
|
|
||||||
use prometeu_compiler::lowering::lower_program;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_hip_conformance_core_to_vm_to_bytecode() {
|
|
||||||
// 1. Setup Core IR Program
|
|
||||||
let mut const_pool = ConstPool::new();
|
|
||||||
let _val_const = const_pool.insert(ConstantValue::Int(42));
|
|
||||||
|
|
||||||
let type_id = CoreTypeId(10);
|
|
||||||
let field_id = FieldId(1);
|
|
||||||
|
|
||||||
let mut field_offsets = HashMap::new();
|
|
||||||
field_offsets.insert(field_id, 0); // Field at offset 0
|
|
||||||
|
|
||||||
let mut field_types = HashMap::new();
|
|
||||||
field_types.insert(field_id, ir_core::Type::Int);
|
|
||||||
|
|
||||||
let program = Program {
|
|
||||||
const_pool,
|
|
||||||
modules: vec![ir_core::Module {
|
|
||||||
name: "conformance".to_string(),
|
|
||||||
functions: vec![ir_core::Function {
|
|
||||||
id: FunctionId(1),
|
|
||||||
name: "main".to_string(),
|
|
||||||
sig: {
|
|
||||||
let mut i = ir_core::global_signature_interner().lock().unwrap();
|
|
||||||
i.intern(ir_core::Signature { params: vec![], return_type: ir_core::Type::Void })
|
|
||||||
},
|
|
||||||
param_slots: 0,
|
|
||||||
local_slots: 0,
|
|
||||||
return_slots: 0,
|
|
||||||
params: vec![],
|
|
||||||
return_type: ir_core::Type::Void,
|
|
||||||
local_types: HashMap::new(),
|
|
||||||
blocks: vec![Block {
|
|
||||||
id: 0,
|
|
||||||
instrs: vec![
|
|
||||||
// allocates a storage struct
|
|
||||||
Instr::from(CoreInstrKind::Alloc { ty: type_id, slots: 2 }),
|
|
||||||
Instr::from(CoreInstrKind::SetLocal(0)), // x = alloc
|
|
||||||
|
|
||||||
// mutates a field
|
|
||||||
Instr::from(CoreInstrKind::BeginMutate { gate: ValueId(0) }),
|
|
||||||
Instr::from(CoreInstrKind::PushConst(CoreConstId(0))),
|
|
||||||
Instr::from(CoreInstrKind::SetLocal(1)), // v = 42
|
|
||||||
Instr::from(CoreInstrKind::GateStoreField { gate: ValueId(0), field: field_id, value: ValueId(1) }),
|
|
||||||
Instr::from(CoreInstrKind::EndMutate),
|
|
||||||
|
|
||||||
// peeks value
|
|
||||||
Instr::from(CoreInstrKind::BeginPeek { gate: ValueId(0) }),
|
|
||||||
Instr::from(CoreInstrKind::GateLoadField { gate: ValueId(0), field: field_id }),
|
|
||||||
Instr::from(CoreInstrKind::EndPeek),
|
|
||||||
|
|
||||||
Instr::from(CoreInstrKind::Pop), // clean up the peeked value
|
|
||||||
],
|
|
||||||
terminator: Terminator::Return,
|
|
||||||
}],
|
|
||||||
}],
|
|
||||||
}],
|
|
||||||
field_offsets,
|
|
||||||
field_types,
|
|
||||||
};
|
|
||||||
|
|
||||||
// 2. Lower to VM IR
|
|
||||||
let vm_module = lower_program(&program).expect("Lowering failed");
|
|
||||||
let func = &vm_module.functions[0];
|
|
||||||
|
|
||||||
// Assert VM IR contains required instructions
|
|
||||||
let kinds: Vec<_> = func.body.iter().map(|i| &i.kind).collect();
|
|
||||||
|
|
||||||
assert!(kinds.iter().any(|k| matches!(k, InstrKind::Alloc { type_id: tid, slots: 2 } if tid.0 == 10)), "Missing correct Alloc");
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateBeginMutate), "Missing GateBeginMutate");
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateEndMutate), "Missing GateEndMutate");
|
|
||||||
assert!(kinds.iter().any(|k| matches!(k, InstrKind::GateStore { offset: 0 })), "Missing correct GateStore");
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateBeginPeek), "Missing GateBeginPeek");
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateEndPeek), "Missing GateEndPeek");
|
|
||||||
assert!(kinds.iter().any(|k| matches!(k, InstrKind::GateLoad { offset: 0 })), "Missing correct GateLoad");
|
|
||||||
|
|
||||||
// RC ops
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateRetain), "Missing GateRetain");
|
|
||||||
assert!(kinds.contains(&&InstrKind::GateRelease), "Missing GateRelease");
|
|
||||||
|
|
||||||
// 3. Emit Bytecode
|
|
||||||
let emit_result = emit_module(&vm_module).expect("Emission failed");
|
|
||||||
let bytecode = emit_result.rom;
|
|
||||||
|
|
||||||
// 4. Assert industrial PBS\0 format
|
|
||||||
use prometeu_bytecode::BytecodeLoader;
|
|
||||||
let module = BytecodeLoader::load(&bytecode).expect("Failed to parse industrial PBC");
|
|
||||||
assert_eq!(&bytecode[0..4], b"PBS\0");
|
|
||||||
|
|
||||||
// 5. Verify a few key instructions in the code section to ensure ABI stability
|
|
||||||
// We don't do a full byte-for-byte check of the entire file here as the section
|
|
||||||
// table offsets vary, but we check the instruction stream.
|
|
||||||
let instrs = module.code;
|
|
||||||
|
|
||||||
// Alloc { tid: 10, slots: 2 } -> 0x60 0x00, 0x0a 0x00 0x00 0x00, 0x02 0x00 0x00 0x00
|
|
||||||
assert!(instrs.windows(10).any(|w| w == &[0x60, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00]));
|
|
||||||
|
|
||||||
// PushConst 1 (42) -> 0x10 0x00, 0x01 0x00, 0x00, 0x00
|
|
||||||
assert!(instrs.windows(6).any(|w| w == &[0x10, 0x00, 0x01, 0x00, 0x00, 0x00]));
|
|
||||||
}
|
|
||||||
@ -1,83 +0,0 @@
|
|||||||
// use prometeu_compiler::compiler::compile;
|
|
||||||
// use std::path::PathBuf;
|
|
||||||
// use std::sync::Arc;
|
|
||||||
// use prometeu_compiler::ir_vm::Value;
|
|
||||||
// use prometeu_hal::{AssetBridge, AudioBridge, GfxBridge, HardwareBridge, HostContext, HostReturn, NativeInterface, PadBridge, TouchBridge};
|
|
||||||
//
|
|
||||||
// struct SimpleNative;
|
|
||||||
// impl NativeInterface for SimpleNative {
|
|
||||||
// fn syscall(&mut self, _id: u32, _args: &[Value], _ret: &mut HostReturn, _ctx: &mut HostContext) -> Result<(), VmFault> {
|
|
||||||
// Ok(())
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// struct SimpleHardware {
|
|
||||||
// gfx: Gfx,
|
|
||||||
// audio: Audio,
|
|
||||||
// pad: Pad,
|
|
||||||
// touch: Touch,
|
|
||||||
// assets: AssetManager,
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// impl SimpleHardware {
|
|
||||||
// fn new() -> Self {
|
|
||||||
// let banks = Arc::new(MemoryBanks::new());
|
|
||||||
// Self {
|
|
||||||
// gfx: Gfx::new(320, 240, banks.clone()),
|
|
||||||
// audio: Audio::new(banks.clone()),
|
|
||||||
// pad: Pad::default(),
|
|
||||||
// touch: Touch::default(),
|
|
||||||
// assets: AssetManager::new(vec![], vec![], banks.clone(), banks.clone()),
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// impl HardwareBridge for SimpleHardware {
|
|
||||||
// fn gfx(&self) -> &dyn GfxBridge { &self.gfx }
|
|
||||||
// fn gfx_mut(&mut self) -> &mut dyn GfxBridge { &mut self.gfx }
|
|
||||||
// fn audio(&self) -> &dyn AudioBridge { &self.audio }
|
|
||||||
// fn audio_mut(&mut self) -> &mut dyn AudioBridge { &mut self.audio }
|
|
||||||
// fn pad(&self) -> &dyn PadBridge { &self.pad }
|
|
||||||
// fn pad_mut(&mut self) -> &mut dyn PadBridge { &mut self.pad }
|
|
||||||
// fn touch(&self) -> &dyn TouchBridge { &self.touch }
|
|
||||||
// fn touch_mut(&mut self) -> &mut dyn TouchBridge { &mut self.touch }
|
|
||||||
// fn assets(&self) -> &dyn AssetBridge { &self.assets }
|
|
||||||
// fn assets_mut(&mut self) -> &mut dyn AssetBridge { &mut self.assets }
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// #[test]
|
|
||||||
// fn test_integration_test01_link() {
|
|
||||||
// let project_dir = PathBuf::from("../../test-cartridges/test01");
|
|
||||||
// // Since the test runs from crates/prometeu-compiler, we need to adjust path if necessary.
|
|
||||||
// // Actually, usually tests run from the workspace root if using cargo test --workspace,
|
|
||||||
// // but if running from the crate dir, it's different.
|
|
||||||
//
|
|
||||||
// // Let's try absolute path or relative to project root.
|
|
||||||
// let mut root_dir = std::env::current_dir().unwrap();
|
|
||||||
// while !root_dir.join("test-cartridges").exists() {
|
|
||||||
// if let Some(parent) = root_dir.parent() {
|
|
||||||
// root_dir = parent.to_path_buf();
|
|
||||||
// } else {
|
|
||||||
// break;
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// let _project_dir = root_dir.join("test-cartridges/test01");
|
|
||||||
//
|
|
||||||
// let unit = compile(&project_dir).expect("Failed to compile and link");
|
|
||||||
//
|
|
||||||
// let mut vm = VirtualMachine::default();
|
|
||||||
// // Use initialize to load the ROM; entrypoint must be numeric or empty (defaults to 0)
|
|
||||||
// vm.initialize(unit.rom, "frame").expect("Failed to initialize VM");
|
|
||||||
//
|
|
||||||
// let mut native = SimpleNative;
|
|
||||||
// let mut hw = SimpleHardware::new();
|
|
||||||
// let mut ctx = HostContext::new(Some(&mut hw));
|
|
||||||
//
|
|
||||||
// // Run for a bit
|
|
||||||
// let report = vm.run_budget(1000, &mut native, &mut ctx).expect("VM execution failed");
|
|
||||||
//
|
|
||||||
// // It should not trap. test01 might loop or return.
|
|
||||||
// if let LogicalFrameEndingReason::Trap(t) = report.reason {
|
|
||||||
// panic!("VM trapped: {:?}", t);
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "prometeu-abi"
|
name = "prometeu-core"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
license.workspace = true
|
license.workspace = true
|
||||||
3
crates/compiler/prometeu-core/src/lib.rs
Normal file
3
crates/compiler/prometeu-core/src/lib.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
mod source;
|
||||||
|
|
||||||
|
pub use source::*;
|
||||||
72
crates/compiler/prometeu-core/src/source/diagnostics.rs
Normal file
72
crates/compiler/prometeu-core/src/source/diagnostics.rs
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
use serde::{Serialize, Serializer};
|
||||||
|
use crate::Span;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum Severity {
|
||||||
|
Error,
|
||||||
|
Warning,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for Severity {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
match self {
|
||||||
|
Severity::Error => serializer.serialize_str("error"),
|
||||||
|
Severity::Warning => serializer.serialize_str("warning"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize)]
|
||||||
|
pub struct Diagnostic {
|
||||||
|
pub severity: Severity,
|
||||||
|
pub code: String,
|
||||||
|
pub message: String,
|
||||||
|
pub span: Span,
|
||||||
|
pub related: Vec<(String, Span)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize)]
|
||||||
|
pub struct DiagnosticBundle {
|
||||||
|
pub diagnostics: Vec<Diagnostic>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DiagnosticBundle {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
diagnostics: Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn push(&mut self, diagnostic: Diagnostic) {
|
||||||
|
self.diagnostics.push(diagnostic);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn error(code: &str, message: String, span: Span) -> Self {
|
||||||
|
let mut bundle = Self::new();
|
||||||
|
bundle.push(Diagnostic {
|
||||||
|
severity: Severity::Error,
|
||||||
|
code: code.to_string(),
|
||||||
|
message,
|
||||||
|
span,
|
||||||
|
related: Vec::new(),
|
||||||
|
});
|
||||||
|
bundle
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has_errors(&self) -> bool {
|
||||||
|
self.diagnostics
|
||||||
|
.iter()
|
||||||
|
.any(|d| matches!(d.severity, Severity::Error))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Diagnostic> for DiagnosticBundle {
|
||||||
|
fn from(diagnostic: Diagnostic) -> Self {
|
||||||
|
let mut bundle = Self::new();
|
||||||
|
bundle.push(diagnostic);
|
||||||
|
bundle
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,5 +1,6 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use crate::ids::FileId;
|
use crate::FileId;
|
||||||
|
use crate::LineIndex;
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct FileDB {
|
pub struct FileDB {
|
||||||
@ -13,11 +14,6 @@ struct FileData {
|
|||||||
line_index: LineIndex,
|
line_index: LineIndex,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LineIndex {
|
|
||||||
line_starts: Vec<u32>,
|
|
||||||
total_len: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileDB {
|
impl FileDB {
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
@ -70,38 +66,3 @@ impl FileDB {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LineIndex {
|
|
||||||
pub fn new(text: &str) -> Self {
|
|
||||||
let mut line_starts = vec![0];
|
|
||||||
for (offset, c) in text.char_indices() {
|
|
||||||
if c == '\n' {
|
|
||||||
line_starts.push((offset + 1) as u32);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Self {
|
|
||||||
line_starts,
|
|
||||||
total_len: text.len() as u32,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn offset_to_line_col(&self, offset: u32) -> (u32, u32) {
|
|
||||||
let line = match self.line_starts.binary_search(&offset) {
|
|
||||||
Ok(line) => line as u32,
|
|
||||||
Err(line) => (line - 1) as u32,
|
|
||||||
};
|
|
||||||
let col = offset - self.line_starts[line as usize];
|
|
||||||
(line, col)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn line_col_to_offset(&self, line: u32, col: u32) -> Option<u32> {
|
|
||||||
let start = *self.line_starts.get(line as usize)?;
|
|
||||||
let offset = start + col;
|
|
||||||
|
|
||||||
let next_start = self.line_starts.get(line as usize + 1).copied().unwrap_or(self.total_len);
|
|
||||||
if offset < next_start || (offset == next_start && offset == self.total_len) {
|
|
||||||
Some(offset)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,6 +1,3 @@
|
|||||||
//! Canonical ID newtypes used across the Prometeu workspace.
|
|
||||||
//! Keep this crate low-level and independent from higher layers.
|
|
||||||
|
|
||||||
macro_rules! define_id {
|
macro_rules! define_id {
|
||||||
($name:ident) => {
|
($name:ident) => {
|
||||||
#[repr(transparent)]
|
#[repr(transparent)]
|
||||||
40
crates/compiler/prometeu-core/src/source/line_index.rs
Normal file
40
crates/compiler/prometeu-core/src/source/line_index.rs
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
pub struct LineIndex {
|
||||||
|
line_starts: Vec<u32>,
|
||||||
|
total_len: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LineIndex {
|
||||||
|
pub fn new(text: &str) -> Self {
|
||||||
|
let mut line_starts = vec![0];
|
||||||
|
for (offset, c) in text.char_indices() {
|
||||||
|
if c == '\n' {
|
||||||
|
line_starts.push((offset + 1) as u32);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Self {
|
||||||
|
line_starts,
|
||||||
|
total_len: text.len() as u32,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn offset_to_line_col(&self, offset: u32) -> (u32, u32) {
|
||||||
|
let line = match self.line_starts.binary_search(&offset) {
|
||||||
|
Ok(line) => line as u32,
|
||||||
|
Err(line) => (line - 1) as u32,
|
||||||
|
};
|
||||||
|
let col = offset - self.line_starts[line as usize];
|
||||||
|
(line, col)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn line_col_to_offset(&self, line: u32, col: u32) -> Option<u32> {
|
||||||
|
let start = *self.line_starts.get(line as usize)?;
|
||||||
|
let offset = start + col;
|
||||||
|
|
||||||
|
let next_start = self.line_starts.get(line as usize + 1).copied().unwrap_or(self.total_len);
|
||||||
|
if offset < next_start || (offset == next_start && offset == self.total_len) {
|
||||||
|
Some(offset)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
13
crates/compiler/prometeu-core/src/source/mod.rs
Normal file
13
crates/compiler/prometeu-core/src/source/mod.rs
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
mod ids;
|
||||||
|
mod span;
|
||||||
|
mod file_db;
|
||||||
|
mod name_interner;
|
||||||
|
mod diagnostics;
|
||||||
|
mod line_index;
|
||||||
|
|
||||||
|
pub use ids::*;
|
||||||
|
pub use span::Span;
|
||||||
|
pub use file_db::FileDB;
|
||||||
|
pub use line_index::LineIndex;
|
||||||
|
pub use name_interner::NameInterner;
|
||||||
|
pub use diagnostics::*;
|
||||||
@ -1,5 +1,5 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use crate::ids::NameId;
|
use crate::NameId;
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone)]
|
#[derive(Debug, Default, Clone)]
|
||||||
pub struct NameInterner {
|
pub struct NameInterner {
|
||||||
@ -1,4 +1,4 @@
|
|||||||
use crate::ids::FileId;
|
use crate::FileId;
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
|
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
|
||||||
pub struct Span {
|
pub struct Span {
|
||||||
@ -1,4 +1,4 @@
|
|||||||
use prometeu_analysis::{FileDB, LineIndex};
|
use prometeu_core::{FileDB, LineIndex};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_line_index_roundtrip() {
|
fn test_line_index_roundtrip() {
|
||||||
@ -1,4 +1,4 @@
|
|||||||
use prometeu_analysis::{ids::FileId, span::Span};
|
use prometeu_core::{FileId, Span};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn span_end_is_exclusive() {
|
fn span_end_is_exclusive() {
|
||||||
13
crates/compiler/prometeu-deps/Cargo.toml
Normal file
13
crates/compiler/prometeu-deps/Cargo.toml
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
[package]
|
||||||
|
name = "prometeu-deps"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
license = "MIT"
|
||||||
|
description = ""
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
serde = { version = "1.0.228", features = ["derive"] }
|
||||||
|
prometeu-core = { path = "../prometeu-core" }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = []
|
||||||
1
crates/compiler/prometeu-deps/src/lib.rs
Normal file
1
crates/compiler/prometeu-deps/src/lib.rs
Normal file
@ -0,0 +1 @@
|
|||||||
|
mod project_registry;
|
||||||
@ -1,6 +1,5 @@
|
|||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use prometeu_core::ProjectId;
|
||||||
use prometeu_analysis::ids::ProjectId;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
|
||||||
pub struct ProjectKey {
|
pub struct ProjectKey {
|
||||||
@ -1,5 +1,5 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "language-api"
|
name = "prometeu-language-api"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
1
crates/compiler/prometeu-language-api/src/lib.rs
Normal file
1
crates/compiler/prometeu-language-api/src/lib.rs
Normal file
@ -0,0 +1 @@
|
|||||||
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user