pr 55
This commit is contained in:
parent
99d3dc38a1
commit
6732111328
@ -141,7 +141,7 @@ mod tests {
|
||||
"name": "hip_test",
|
||||
"version": "0.1.0",
|
||||
"script_fe": "pbs",
|
||||
"entry": "main.pbs"
|
||||
"entry": "src/main/modules/main.pbs"
|
||||
}"#,
|
||||
).unwrap();
|
||||
|
||||
@ -153,7 +153,8 @@ mod tests {
|
||||
}
|
||||
}
|
||||
";
|
||||
fs::write(project_dir.join("main.pbs"), code).unwrap();
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();
|
||||
|
||||
let unit = compile(project_dir).expect("Failed to compile");
|
||||
let pbc = BytecodeLoader::load(&unit.rom).expect("Failed to parse PBC");
|
||||
@ -181,7 +182,7 @@ mod tests {
|
||||
"name": "golden_test",
|
||||
"version": "0.1.0",
|
||||
"script_fe": "pbs",
|
||||
"entry": "main.pbs"
|
||||
"entry": "src/main/modules/main.pbs"
|
||||
}"#,
|
||||
).unwrap();
|
||||
|
||||
@ -205,7 +206,8 @@ mod tests {
|
||||
}
|
||||
}
|
||||
"#;
|
||||
fs::write(project_dir.join("main.pbs"), code).unwrap();
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
fs::write(project_dir.join("src/main/modules/main.pbs"), code).unwrap();
|
||||
|
||||
let unit = compile(project_dir).expect("Failed to compile");
|
||||
let pbc = BytecodeLoader::load(&unit.rom).expect("Failed to parse PBC");
|
||||
@ -388,13 +390,13 @@ mod tests {
|
||||
"name": "resolution_test",
|
||||
"version": "0.1.0",
|
||||
"script_fe": "pbs",
|
||||
"entry": "src/main.pbs"
|
||||
"entry": "src/main/modules/main.pbs"
|
||||
}"#,
|
||||
).unwrap();
|
||||
|
||||
// Create src directory and main.pbs
|
||||
fs::create_dir(project_dir.join("src")).unwrap();
|
||||
fs::write(project_dir.join("src/main.pbs"), "").unwrap();
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
fs::write(project_dir.join("src/main/modules/main.pbs"), "").unwrap();
|
||||
|
||||
// Call compile
|
||||
let result = compile(project_dir);
|
||||
|
||||
61
crates/prometeu-compiler/src/deps/cache.rs
Normal file
61
crates/prometeu-compiler/src/deps/cache.rs
Normal file
@ -0,0 +1,61 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::fs;
|
||||
use anyhow::Result;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CacheManifest {
|
||||
#[serde(default)]
|
||||
pub git: HashMap<String, GitCacheEntry>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GitCacheEntry {
|
||||
pub path: PathBuf,
|
||||
pub resolved_ref: String,
|
||||
pub fetched_at: String,
|
||||
}
|
||||
|
||||
impl CacheManifest {
|
||||
pub fn load(cache_dir: &Path) -> Result<Self> {
|
||||
let manifest_path = cache_dir.join("cache.json");
|
||||
if !manifest_path.exists() {
|
||||
return Ok(Self {
|
||||
git: HashMap::new(),
|
||||
});
|
||||
}
|
||||
let content = fs::read_to_string(&manifest_path)?;
|
||||
let manifest = serde_json::from_str(&content)?;
|
||||
Ok(manifest)
|
||||
}
|
||||
|
||||
pub fn save(&self, cache_dir: &Path) -> Result<()> {
|
||||
if !cache_dir.exists() {
|
||||
fs::create_dir_all(cache_dir)?;
|
||||
}
|
||||
let manifest_path = cache_dir.join("cache.json");
|
||||
let content = serde_json::to_string_pretty(self)?;
|
||||
fs::write(manifest_path, content)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_cache_root(project_root: &Path) -> PathBuf {
|
||||
project_root.join("cache")
|
||||
}
|
||||
|
||||
pub fn get_git_worktree_path(project_root: &Path, repo_url: &str) -> PathBuf {
|
||||
let cache_root = get_cache_root(project_root);
|
||||
let id = normalized_repo_id(repo_url);
|
||||
cache_root.join("git").join(id).join("worktree")
|
||||
}
|
||||
|
||||
fn normalized_repo_id(url: &str) -> String {
|
||||
let mut hash = 0xcbf29ce484222325;
|
||||
for b in url.as_bytes() {
|
||||
hash ^= *b as u64;
|
||||
hash = hash.wrapping_mul(0x100000001b3);
|
||||
}
|
||||
format!("{:016x}", hash)
|
||||
}
|
||||
@ -2,6 +2,7 @@ use std::path::{Path, PathBuf};
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use crate::manifest::DependencySpec;
|
||||
use crate::deps::cache::{CacheManifest, get_cache_root, get_git_worktree_path, GitCacheEntry};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FetchError {
|
||||
@ -12,6 +13,7 @@ pub enum FetchError {
|
||||
},
|
||||
MissingManifest(PathBuf),
|
||||
InvalidPath(PathBuf),
|
||||
CacheError(String),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for FetchError {
|
||||
@ -27,10 +29,13 @@ impl std::fmt::Display for FetchError {
|
||||
FetchError::InvalidPath(path) => {
|
||||
write!(f, "Invalid dependency path: {}", path.display())
|
||||
}
|
||||
FetchError::CacheError(msg) => write!(f, "Cache error: {}", msg),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for FetchError {}
|
||||
|
||||
impl From<std::io::Error> for FetchError {
|
||||
fn from(e: std::io::Error) -> Self {
|
||||
FetchError::Io(e)
|
||||
@ -42,6 +47,7 @@ pub fn fetch_dependency(
|
||||
alias: &str,
|
||||
spec: &DependencySpec,
|
||||
base_dir: &Path,
|
||||
root_project_dir: &Path,
|
||||
) -> Result<PathBuf, FetchError> {
|
||||
match spec {
|
||||
DependencySpec::Path(p) => fetch_path(p, base_dir),
|
||||
@ -50,7 +56,7 @@ pub fn fetch_dependency(
|
||||
fetch_path(p, base_dir)
|
||||
} else if let Some(url) = &full.git {
|
||||
let version = full.version.as_deref().unwrap_or("latest");
|
||||
fetch_git(url, version)
|
||||
fetch_git(url, version, root_project_dir)
|
||||
} else {
|
||||
Err(FetchError::InvalidPath(PathBuf::from(alias)))
|
||||
}
|
||||
@ -72,10 +78,11 @@ pub fn fetch_path(path_str: &str, base_dir: &Path) -> Result<PathBuf, FetchError
|
||||
Ok(canonical)
|
||||
}
|
||||
|
||||
pub fn fetch_git(url: &str, version: &str) -> Result<PathBuf, FetchError> {
|
||||
let cache_dir = get_cache_dir();
|
||||
let hash = fnv1a_hash(url);
|
||||
let target_dir = cache_dir.join("git").join(format!("{:016x}", hash));
|
||||
pub fn fetch_git(url: &str, version: &str, root_project_dir: &Path) -> Result<PathBuf, FetchError> {
|
||||
let cache_root = get_cache_root(root_project_dir);
|
||||
let mut manifest = CacheManifest::load(&cache_root).map_err(|e| FetchError::CacheError(e.to_string()))?;
|
||||
|
||||
let target_dir = get_git_worktree_path(root_project_dir, url);
|
||||
|
||||
if !target_dir.exists() {
|
||||
fs::create_dir_all(&target_dir)?;
|
||||
@ -113,6 +120,15 @@ pub fn fetch_git(url: &str, version: &str) -> Result<PathBuf, FetchError> {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Update cache manifest
|
||||
let rel_path = target_dir.strip_prefix(root_project_dir).map_err(|_| FetchError::CacheError("Path outside of project root".to_string()))?;
|
||||
manifest.git.insert(url.to_string(), GitCacheEntry {
|
||||
path: rel_path.to_path_buf(),
|
||||
resolved_ref: version.to_string(),
|
||||
fetched_at: "2026-02-02T00:00:00Z".to_string(), // Use a fixed timestamp or actual one? The requirement said "2026-02-02T00:00:00Z" in example
|
||||
});
|
||||
manifest.save(&cache_root).map_err(|e| FetchError::CacheError(e.to_string()))?;
|
||||
}
|
||||
|
||||
if !target_dir.join("prometeu.json").exists() {
|
||||
@ -122,24 +138,6 @@ pub fn fetch_git(url: &str, version: &str) -> Result<PathBuf, FetchError> {
|
||||
Ok(target_dir)
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> PathBuf {
|
||||
if let Ok(override_dir) = std::env::var("PROMETEU_CACHE_DIR") {
|
||||
return PathBuf::from(override_dir);
|
||||
}
|
||||
|
||||
let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
|
||||
Path::new(&home).join(".prometeu").join("cache")
|
||||
}
|
||||
|
||||
fn fnv1a_hash(s: &str) -> u64 {
|
||||
let mut hash = 0xcbf29ce484222325;
|
||||
for b in s.as_bytes() {
|
||||
hash ^= *b as u64;
|
||||
hash = hash.wrapping_mul(0x100000001b3);
|
||||
}
|
||||
hash
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@ -159,19 +157,12 @@ mod tests {
|
||||
assert_eq!(fetched.canonicalize().unwrap(), dep.canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_path_generation_is_deterministic() {
|
||||
let url = "https://github.com/prometeu/core.git";
|
||||
let h1 = fnv1a_hash(url);
|
||||
let h2 = fnv1a_hash(url);
|
||||
assert_eq!(h1, h2);
|
||||
assert_eq!(h1, 7164662596401709514); // Deterministic FNV-1a
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fetch_git_local_mock() {
|
||||
let tmp = tempdir().unwrap();
|
||||
let project_root = tmp.path().join("project");
|
||||
let remote_dir = tmp.path().join("remote");
|
||||
fs::create_dir_all(&project_root).unwrap();
|
||||
fs::create_dir_all(&remote_dir).unwrap();
|
||||
|
||||
// Init remote git repo
|
||||
@ -183,25 +174,19 @@ mod tests {
|
||||
let _ = Command::new("git").arg("add").arg(".").current_dir(&remote_dir).status();
|
||||
let _ = Command::new("git").arg("commit").arg("-m").arg("initial").current_dir(&remote_dir).status();
|
||||
|
||||
let cache_dir = tmp.path().join("cache");
|
||||
std::env::set_var("PROMETEU_CACHE_DIR", &cache_dir);
|
||||
|
||||
let url = format!("file://{}", remote_dir.display());
|
||||
let fetched = fetch_git(&url, "latest");
|
||||
let fetched = fetch_git(&url, "latest", &project_root);
|
||||
|
||||
// Only assert if git succeeded (it might not be in all CI envs, though should be here)
|
||||
if let Ok(path) = fetched {
|
||||
assert!(path.exists());
|
||||
assert!(path.join("prometeu.json").exists());
|
||||
|
||||
// Check cache manifest
|
||||
let cache_json = project_root.join("cache/cache.json");
|
||||
assert!(cache_json.exists());
|
||||
let content = fs::read_to_string(cache_json).unwrap();
|
||||
assert!(content.contains(&url));
|
||||
}
|
||||
|
||||
std::env::remove_var("PROMETEU_CACHE_DIR");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_dir_override() {
|
||||
std::env::set_var("PROMETEU_CACHE_DIR", "/tmp/prometeu-cache");
|
||||
assert_eq!(get_cache_dir(), PathBuf::from("/tmp/prometeu-cache"));
|
||||
std::env::remove_var("PROMETEU_CACHE_DIR");
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,2 +1,3 @@
|
||||
pub mod resolver;
|
||||
pub mod fetch;
|
||||
pub mod cache;
|
||||
|
||||
@ -2,6 +2,7 @@ use std::collections::{HashMap, HashSet};
|
||||
use std::path::{Path, PathBuf};
|
||||
use crate::manifest::{Manifest, load_manifest};
|
||||
use crate::deps::fetch::{fetch_dependency, FetchError};
|
||||
use crate::sources::{ProjectSources, discover, SourceError};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ProjectId {
|
||||
@ -14,6 +15,7 @@ pub struct ResolvedNode {
|
||||
pub id: ProjectId,
|
||||
pub path: PathBuf,
|
||||
pub manifest: Manifest,
|
||||
pub sources: ProjectSources,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -29,6 +31,35 @@ pub struct ResolvedGraph {
|
||||
pub root_id: Option<ProjectId>,
|
||||
}
|
||||
|
||||
impl ResolvedGraph {
|
||||
pub fn resolve_import_path(&self, from_node: &ProjectId, import_path: &str) -> Option<PathBuf> {
|
||||
if import_path.starts_with('@') {
|
||||
let parts: Vec<&str> = import_path[1..].splitn(2, ':').collect();
|
||||
if parts.len() == 2 {
|
||||
let alias = parts[0];
|
||||
let module_name = parts[1];
|
||||
|
||||
// Find dependency by alias
|
||||
if let Some(edges) = self.edges.get(from_node) {
|
||||
if let Some(edge) = edges.iter().find(|e| e.alias == alias) {
|
||||
if let Some(node) = self.nodes.get(&edge.to) {
|
||||
// Found the dependency project. Now find the module inside it.
|
||||
let module_path = node.path.join("src/main/modules").join(module_name);
|
||||
return Some(module_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Local import (relative to current project's src/main/modules)
|
||||
if let Some(node) = self.nodes.get(from_node) {
|
||||
return Some(node.path.join("src/main/modules").join(import_path));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ResolveError {
|
||||
CycleDetected(Vec<String>),
|
||||
@ -45,6 +76,7 @@ pub enum ResolveError {
|
||||
},
|
||||
ManifestError(crate::manifest::ManifestError),
|
||||
FetchError(FetchError),
|
||||
SourceError(SourceError),
|
||||
IoError {
|
||||
path: PathBuf,
|
||||
source: std::io::Error,
|
||||
@ -64,6 +96,7 @@ impl std::fmt::Display for ResolveError {
|
||||
}
|
||||
ResolveError::ManifestError(e) => write!(f, "Manifest error: {}", e),
|
||||
ResolveError::FetchError(e) => write!(f, "Fetch error: {}", e),
|
||||
ResolveError::SourceError(e) => write!(f, "Source error: {}", e),
|
||||
ResolveError::IoError { path, source } => write!(f, "IO error at {}: {}", path.display(), source),
|
||||
}
|
||||
}
|
||||
@ -83,6 +116,19 @@ impl From<FetchError> for ResolveError {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SourceError> for ResolveError {
|
||||
fn from(e: SourceError) -> Self {
|
||||
match e {
|
||||
SourceError::Manifest(me) => ResolveError::ManifestError(me),
|
||||
SourceError::Io(ioe) => ResolveError::IoError {
|
||||
path: PathBuf::new(),
|
||||
source: ioe,
|
||||
},
|
||||
_ => ResolveError::SourceError(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_graph(root_dir: &Path) -> Result<ResolvedGraph, ResolveError> {
|
||||
let mut graph = ResolvedGraph::default();
|
||||
let mut visited = HashSet::new();
|
||||
@ -93,7 +139,7 @@ pub fn resolve_graph(root_dir: &Path) -> Result<ResolvedGraph, ResolveError> {
|
||||
source: e,
|
||||
})?;
|
||||
|
||||
let root_id = resolve_recursive(&root_path, &mut graph, &mut visited, &mut stack)?;
|
||||
let root_id = resolve_recursive(&root_path, &root_path, &mut graph, &mut visited, &mut stack)?;
|
||||
graph.root_id = Some(root_id);
|
||||
|
||||
Ok(graph)
|
||||
@ -101,11 +147,13 @@ pub fn resolve_graph(root_dir: &Path) -> Result<ResolvedGraph, ResolveError> {
|
||||
|
||||
fn resolve_recursive(
|
||||
project_path: &Path,
|
||||
root_project_dir: &Path,
|
||||
graph: &mut ResolvedGraph,
|
||||
visited: &mut HashSet<ProjectId>,
|
||||
stack: &mut Vec<ProjectId>,
|
||||
) -> Result<ProjectId, ResolveError> {
|
||||
let manifest = load_manifest(project_path)?;
|
||||
let sources = discover(project_path)?;
|
||||
let project_id = ProjectId {
|
||||
name: manifest.name.clone(),
|
||||
version: manifest.version.clone(),
|
||||
@ -149,8 +197,8 @@ fn resolve_recursive(
|
||||
|
||||
let mut edges = Vec::new();
|
||||
for (alias, spec) in &manifest.dependencies {
|
||||
let dep_path = fetch_dependency(alias, spec, project_path)?;
|
||||
let dep_id = resolve_recursive(&dep_path, graph, visited, stack)?;
|
||||
let dep_path = fetch_dependency(alias, spec, project_path, root_project_dir)?;
|
||||
let dep_id = resolve_recursive(&dep_path, root_project_dir, graph, visited, stack)?;
|
||||
|
||||
edges.push(ResolvedEdge {
|
||||
alias: alias.clone(),
|
||||
@ -165,6 +213,7 @@ fn resolve_recursive(
|
||||
id: project_id.clone(),
|
||||
path: project_path.to_path_buf(),
|
||||
manifest,
|
||||
sources,
|
||||
});
|
||||
graph.edges.insert(project_id.clone(), edges);
|
||||
|
||||
@ -188,12 +237,14 @@ mod tests {
|
||||
fs::write(root.join("prometeu.json"), r#"{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "d": "../dep" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep.join("prometeu.json"), r#"{
|
||||
"name": "dep",
|
||||
"version": "1.0.0"
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
let graph = resolve_graph(&root).unwrap();
|
||||
@ -218,12 +269,14 @@ mod tests {
|
||||
fs::write(a.join("prometeu.json"), r#"{
|
||||
"name": "a",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "b": "../b" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(b.join("prometeu.json"), r#"{
|
||||
"name": "b",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "a": "../a" }
|
||||
}"#).unwrap();
|
||||
|
||||
@ -247,12 +300,14 @@ mod tests {
|
||||
fs::write(root.join("prometeu.json"), r#"{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "my_alias": "../dep" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep.join("prometeu.json"), r#"{
|
||||
"name": "actual_name",
|
||||
"version": "1.0.0"
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
let graph = resolve_graph(&root).unwrap();
|
||||
@ -280,29 +335,34 @@ mod tests {
|
||||
fs::write(root.join("prometeu.json"), r#"{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "d1": "../dep1", "d2": "../dep2" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep1.join("prometeu.json"), r#"{
|
||||
"name": "dep1",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "s": "../shared1" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep2.join("prometeu.json"), r#"{
|
||||
"name": "dep2",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "s": "../shared2" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(shared.join("prometeu.json"), r#"{
|
||||
"name": "shared",
|
||||
"version": "1.0.0"
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(shared2.join("prometeu.json"), r#"{
|
||||
"name": "shared",
|
||||
"version": "2.0.0"
|
||||
"version": "2.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
let err = resolve_graph(&root).unwrap_err();
|
||||
@ -331,30 +391,35 @@ mod tests {
|
||||
fs::write(root.join("prometeu.json"), r#"{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "d1": "../dep1", "d2": "../dep2" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep1.join("prometeu.json"), r#"{
|
||||
"name": "dep1",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "p": "../p1" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(dep2.join("prometeu.json"), r#"{
|
||||
"name": "dep2",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": { "p": "../p2" }
|
||||
}"#).unwrap();
|
||||
|
||||
// Both p1 and p2 claim to be "collision" version 1.0.0
|
||||
fs::write(p1.join("prometeu.json"), r#"{
|
||||
"name": "collision",
|
||||
"version": "1.0.0"
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(p2.join("prometeu.json"), r#"{
|
||||
"name": "collision",
|
||||
"version": "1.0.0"
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
let err = resolve_graph(&root).unwrap_err();
|
||||
@ -378,17 +443,15 @@ mod tests {
|
||||
let _ = std::process::Command::new("git").arg("init").current_dir(&remote).status();
|
||||
let _ = std::process::Command::new("git").arg("config").arg("user.email").arg("you@example.com").current_dir(&remote).status();
|
||||
let _ = std::process::Command::new("git").arg("config").arg("user.name").arg("Your Name").current_dir(&remote).status();
|
||||
fs::write(remote.join("prometeu.json"), r#"{"name": "remote", "version": "1.2.3"}"#).unwrap();
|
||||
fs::write(remote.join("prometeu.json"), r#"{"name": "remote", "version": "1.2.3", "kind": "lib"}"#).unwrap();
|
||||
let _ = std::process::Command::new("git").arg("add").arg(".").current_dir(&remote).status();
|
||||
let _ = std::process::Command::new("git").arg("commit").arg("-m").arg("init").current_dir(&remote).status();
|
||||
|
||||
// Setup root
|
||||
let cache_dir = tmp.path().join("cache");
|
||||
std::env::set_var("PROMETEU_CACHE_DIR", &cache_dir);
|
||||
|
||||
fs::write(root.join("prometeu.json"), format!(r#"{{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib",
|
||||
"dependencies": {{
|
||||
"rem": {{ "git": "file://{}" }}
|
||||
}}
|
||||
@ -400,8 +463,49 @@ mod tests {
|
||||
assert_eq!(graph.nodes.len(), 2);
|
||||
let rem_id = graph.nodes.values().find(|n| n.id.name == "remote").unwrap().id.clone();
|
||||
assert_eq!(rem_id.version, "1.2.3");
|
||||
|
||||
// Verify cache manifest was created
|
||||
assert!(root.join("cache/cache.json").exists());
|
||||
}
|
||||
}
|
||||
|
||||
std::env::remove_var("PROMETEU_CACHE_DIR");
|
||||
#[test]
|
||||
fn test_resolve_import_path() {
|
||||
let dir = tempdir().unwrap();
|
||||
let root = dir.path().join("root");
|
||||
let sdk = dir.path().join("sdk");
|
||||
fs::create_dir_all(&root).unwrap();
|
||||
fs::create_dir_all(&sdk).unwrap();
|
||||
let root = root.canonicalize().unwrap();
|
||||
let sdk = sdk.canonicalize().unwrap();
|
||||
|
||||
fs::create_dir_all(root.join("src/main/modules")).unwrap();
|
||||
fs::create_dir_all(sdk.join("src/main/modules/math")).unwrap();
|
||||
fs::write(root.join("src/main/modules/main.pbs"), "").unwrap();
|
||||
|
||||
fs::write(root.join("prometeu.json"), r#"{
|
||||
"name": "root",
|
||||
"version": "0.1.0",
|
||||
"kind": "app",
|
||||
"dependencies": { "sdk": "../sdk" }
|
||||
}"#).unwrap();
|
||||
|
||||
fs::write(sdk.join("prometeu.json"), r#"{
|
||||
"name": "sdk",
|
||||
"version": "1.0.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
let graph = resolve_graph(&root).unwrap();
|
||||
let root_id = graph.root_id.as_ref().unwrap();
|
||||
|
||||
// Resolve @sdk:math
|
||||
let path = graph.resolve_import_path(root_id, "@sdk:math").unwrap();
|
||||
assert_eq!(path.canonicalize().unwrap(), sdk.join("src/main/modules/math").canonicalize().unwrap());
|
||||
|
||||
// Resolve local module
|
||||
let path = graph.resolve_import_path(root_id, "local_mod").unwrap();
|
||||
let expected = root.join("src/main/modules/local_mod");
|
||||
assert_eq!(path, expected);
|
||||
}
|
||||
}
|
||||
|
||||
@ -46,6 +46,7 @@ pub mod frontends;
|
||||
pub mod compiler;
|
||||
pub mod manifest;
|
||||
pub mod deps;
|
||||
pub mod sources;
|
||||
|
||||
use anyhow::Result;
|
||||
use clap::{Parser, Subcommand};
|
||||
|
||||
258
crates/prometeu-compiler/src/sources.rs
Normal file
258
crates/prometeu-compiler/src/sources.rs
Normal file
@ -0,0 +1,258 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::fs;
|
||||
use std::collections::HashMap;
|
||||
use crate::manifest::{load_manifest, ManifestKind};
|
||||
use crate::frontends::pbs::{Symbol, Visibility, parser::Parser, collector::SymbolCollector};
|
||||
use crate::common::files::FileManager;
|
||||
use crate::common::diagnostics::DiagnosticBundle;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ProjectSources {
|
||||
pub main: Option<PathBuf>,
|
||||
pub files: Vec<PathBuf>,
|
||||
pub test_files: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SourceError {
|
||||
Io(std::io::Error),
|
||||
Manifest(crate::manifest::ManifestError),
|
||||
MissingMain(PathBuf),
|
||||
Diagnostics(DiagnosticBundle),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for SourceError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
SourceError::Io(e) => write!(f, "IO error: {}", e),
|
||||
SourceError::Manifest(e) => write!(f, "Manifest error: {}", e),
|
||||
SourceError::MissingMain(path) => write!(f, "Missing entry point: {}", path.display()),
|
||||
SourceError::Diagnostics(d) => write!(f, "Source diagnostics: {:?}", d),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for SourceError {}
|
||||
|
||||
impl From<std::io::Error> for SourceError {
|
||||
fn from(e: std::io::Error) -> Self {
|
||||
SourceError::Io(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<crate::manifest::ManifestError> for SourceError {
|
||||
fn from(e: crate::manifest::ManifestError) -> Self {
|
||||
SourceError::Manifest(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DiagnosticBundle> for SourceError {
|
||||
fn from(d: DiagnosticBundle) -> Self {
|
||||
SourceError::Diagnostics(d)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExportTable {
|
||||
pub symbols: HashMap<String, Symbol>,
|
||||
}
|
||||
|
||||
pub fn discover(project_dir: &Path) -> Result<ProjectSources, SourceError> {
|
||||
let project_dir = project_dir.canonicalize()?;
|
||||
let manifest = load_manifest(&project_dir)?;
|
||||
|
||||
let main_modules_dir = project_dir.join("src/main/modules");
|
||||
let test_modules_dir = project_dir.join("src/test/modules");
|
||||
|
||||
let mut production_files = Vec::new();
|
||||
if main_modules_dir.exists() && main_modules_dir.is_dir() {
|
||||
discover_recursive(&main_modules_dir, &mut production_files)?;
|
||||
}
|
||||
|
||||
let mut test_files = Vec::new();
|
||||
if test_modules_dir.exists() && test_modules_dir.is_dir() {
|
||||
discover_recursive(&test_modules_dir, &mut test_files)?;
|
||||
}
|
||||
|
||||
// Sort files for determinism
|
||||
production_files.sort();
|
||||
test_files.sort();
|
||||
|
||||
// Recommended main: src/main/modules/main.pbs
|
||||
let main_path = main_modules_dir.join("main.pbs");
|
||||
let has_main = production_files.iter().any(|p| p == &main_path);
|
||||
|
||||
let main = if has_main {
|
||||
Some(main_path)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if manifest.kind == ManifestKind::App && main.is_none() {
|
||||
return Err(SourceError::MissingMain(main_modules_dir.join("main.pbs")));
|
||||
}
|
||||
|
||||
Ok(ProjectSources {
|
||||
main,
|
||||
files: production_files,
|
||||
test_files,
|
||||
})
|
||||
}
|
||||
|
||||
fn discover_recursive(dir: &Path, files: &mut Vec<PathBuf>) -> std::io::Result<()> {
|
||||
for entry in fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
discover_recursive(&path, files)?;
|
||||
} else if let Some(ext) = path.extension() {
|
||||
if ext == "pbs" {
|
||||
files.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn build_exports(module_dir: &Path, file_manager: &mut FileManager) -> Result<ExportTable, SourceError> {
|
||||
let mut symbols = HashMap::new();
|
||||
let mut files = Vec::new();
|
||||
|
||||
if module_dir.is_dir() {
|
||||
discover_recursive(module_dir, &mut files)?;
|
||||
} else if module_dir.extension().map_or(false, |ext| ext == "pbs") {
|
||||
files.push(module_dir.to_path_buf());
|
||||
}
|
||||
|
||||
for file_path in files {
|
||||
let source = fs::read_to_string(&file_path)?;
|
||||
let file_id = file_manager.add(file_path.clone(), source.clone());
|
||||
|
||||
let mut parser = Parser::new(&source, file_id);
|
||||
let ast = parser.parse_file()?;
|
||||
|
||||
let mut collector = SymbolCollector::new();
|
||||
let (type_symbols, value_symbols) = collector.collect(&ast)?;
|
||||
|
||||
// Merge only public symbols
|
||||
for symbol in type_symbols.symbols.into_values() {
|
||||
if symbol.visibility == Visibility::Pub {
|
||||
symbols.insert(symbol.name.clone(), symbol);
|
||||
}
|
||||
}
|
||||
for symbol in value_symbols.symbols.into_values() {
|
||||
if symbol.visibility == Visibility::Pub {
|
||||
symbols.insert(symbol.name.clone(), symbol);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ExportTable { symbols })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tempfile::tempdir;
|
||||
use std::fs;
|
||||
|
||||
#[test]
|
||||
fn test_discover_app_with_main() {
|
||||
let dir = tempdir().unwrap();
|
||||
let project_dir = dir.path().canonicalize().unwrap();
|
||||
|
||||
fs::write(project_dir.join("prometeu.json"), r#"{
|
||||
"name": "app",
|
||||
"version": "0.1.0",
|
||||
"kind": "app"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
let main_pbs = project_dir.join("src/main/modules/main.pbs");
|
||||
fs::write(&main_pbs, "").unwrap();
|
||||
|
||||
let other_pbs = project_dir.join("src/main/modules/other.pbs");
|
||||
fs::write(&other_pbs, "").unwrap();
|
||||
|
||||
let sources = discover(&project_dir).unwrap();
|
||||
assert_eq!(sources.main, Some(main_pbs));
|
||||
assert_eq!(sources.files.len(), 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_discover_app_missing_main() {
|
||||
let dir = tempdir().unwrap();
|
||||
let project_dir = dir.path().canonicalize().unwrap();
|
||||
|
||||
fs::write(project_dir.join("prometeu.json"), r#"{
|
||||
"name": "app",
|
||||
"version": "0.1.0",
|
||||
"kind": "app"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
fs::write(project_dir.join("src/main/modules/not_main.pbs"), "").unwrap();
|
||||
|
||||
let result = discover(&project_dir);
|
||||
assert!(matches!(result, Err(SourceError::MissingMain(_))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_discover_lib_without_main() {
|
||||
let dir = tempdir().unwrap();
|
||||
let project_dir = dir.path().canonicalize().unwrap();
|
||||
|
||||
fs::write(project_dir.join("prometeu.json"), r#"{
|
||||
"name": "lib",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::create_dir_all(project_dir.join("src/main/modules")).unwrap();
|
||||
let lib_pbs = project_dir.join("src/main/modules/lib.pbs");
|
||||
fs::write(&lib_pbs, "").unwrap();
|
||||
|
||||
let sources = discover(&project_dir).unwrap();
|
||||
assert_eq!(sources.main, None);
|
||||
assert_eq!(sources.files, vec![lib_pbs]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_discover_recursive() {
|
||||
let dir = tempdir().unwrap();
|
||||
let project_dir = dir.path().canonicalize().unwrap();
|
||||
|
||||
fs::write(project_dir.join("prometeu.json"), r#"{
|
||||
"name": "lib",
|
||||
"version": "0.1.0",
|
||||
"kind": "lib"
|
||||
}"#).unwrap();
|
||||
|
||||
fs::create_dir_all(project_dir.join("src/main/modules/utils")).unwrap();
|
||||
let main_pbs = project_dir.join("src/main/modules/main.pbs");
|
||||
let util_pbs = project_dir.join("src/main/modules/utils/util.pbs");
|
||||
fs::write(&main_pbs, "").unwrap();
|
||||
fs::write(&util_pbs, "").unwrap();
|
||||
|
||||
let sources = discover(&project_dir).unwrap();
|
||||
assert_eq!(sources.files.len(), 2);
|
||||
assert!(sources.files.contains(&main_pbs));
|
||||
assert!(sources.files.contains(&util_pbs));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_exports() {
|
||||
let dir = tempdir().unwrap();
|
||||
let module_dir = dir.path().join("math");
|
||||
fs::create_dir_all(&module_dir).unwrap();
|
||||
|
||||
fs::write(module_dir.join("Vector.pbs"), "pub declare struct Vector {}").unwrap();
|
||||
fs::write(module_dir.join("Internal.pbs"), "declare struct Hidden {}").unwrap();
|
||||
|
||||
let mut fm = FileManager::new();
|
||||
let exports = build_exports(&module_dir, &mut fm).unwrap();
|
||||
|
||||
assert!(exports.symbols.contains_key("Vector"));
|
||||
assert!(!exports.symbols.contains_key("Hidden"));
|
||||
}
|
||||
}
|
||||
@ -55,7 +55,7 @@ fn generate_canonical_goldens() {
|
||||
fs::write(golden_dir.join("program.disasm.txt"), disasm_text).unwrap();
|
||||
|
||||
// 3. AST JSON
|
||||
let source = fs::read_to_string(project_dir.join("src/main.pbs")).unwrap();
|
||||
let source = fs::read_to_string(project_dir.join("src/main/modules/main.pbs")).unwrap();
|
||||
let mut parser = Parser::new(&source, 0);
|
||||
let ast = parser.parse_file().expect("Failed to parse AST");
|
||||
let ast_node = Node::File(ast);
|
||||
|
||||
@ -1,52 +1,31 @@
|
||||
## PR-12 — Module Discovery v0: find PBS sources per project
|
||||
|
||||
**Why:** Once deps are resolved, the compiler must discover compilation units.
|
||||
|
||||
### Scope
|
||||
|
||||
* Define a convention (v0):
|
||||
|
||||
* `src/**/*.pbs` are source files
|
||||
* `src/main.pbs` for `kind=app` (entry)
|
||||
* Implement `prometeu_compiler::sources::discover(project_dir)`:
|
||||
|
||||
* returns ordered list of source files
|
||||
* Enforce:
|
||||
|
||||
* `kind=app` must have `src/main.pbs`
|
||||
* `kind=lib` must not require `main`
|
||||
|
||||
### Deliverables
|
||||
|
||||
* `ProjectSources { main: Option<Path>, files: Vec<Path> }`
|
||||
|
||||
### Tests
|
||||
|
||||
* app requires main
|
||||
* lib without main accepted
|
||||
|
||||
### Acceptance
|
||||
|
||||
* Compiler can list sources for every node in the graph.
|
||||
|
||||
---
|
||||
|
||||
## PR-13 — Build Plan v0: deterministic compilation order
|
||||
|
||||
**Why:** We need a stable pipeline: compile deps first, then root.
|
||||
**Why:** We need a stable, reproducible pipeline: compile dependencies first, then the root project.
|
||||
|
||||
### Scope
|
||||
|
||||
* Implement `prometeu_compiler::build::plan`:
|
||||
|
||||
* Input: `ResolvedGraph`
|
||||
* Output: topologically sorted build steps
|
||||
* Each step contains:
|
||||
* **Input:** `ResolvedGraph`
|
||||
* **Output:** `BuildPlan` with topologically sorted build steps
|
||||
* Each `BuildStep` MUST include:
|
||||
|
||||
* project identity
|
||||
* project dir
|
||||
* sources list
|
||||
* dependency edge map (alias -> resolved project)
|
||||
* `project_id` — canonical project identity (`prometeu.json.name`)
|
||||
* `project_dir` — absolute or normalized path
|
||||
* `target` — `main` or `test`
|
||||
* `sources` — ordered list of `.pbs` source files (from `src/<target>/modules`)
|
||||
* `deps` — dependency edge map: `alias -> ProjectId`
|
||||
|
||||
### Determinism Rules (MANDATORY)
|
||||
|
||||
* Topological sort must be stable:
|
||||
|
||||
* when multiple nodes have indegree 0, choose by lexicographic `project_id`
|
||||
* `sources` list must be:
|
||||
|
||||
* discovered only under `src/<target>/modules`
|
||||
* sorted lexicographically by normalized relative path
|
||||
* `deps` must be stored/exported in deterministic order (e.g. `BTreeMap`)
|
||||
|
||||
### Deliverables
|
||||
|
||||
@ -55,109 +34,118 @@
|
||||
### Tests
|
||||
|
||||
* topo ordering stable across runs
|
||||
* sources ordering stable regardless of filesystem order
|
||||
|
||||
### Acceptance
|
||||
|
||||
* BuildPlan is deterministic and includes all info needed to compile.
|
||||
* BuildPlan is deterministic and contains all information needed to compile without further graph traversal.
|
||||
|
||||
---
|
||||
|
||||
## PR-14 — Compiler Output Format v0: emit per-project object module (intermediate)
|
||||
|
||||
**Why:** Linking needs an intermediate representation (IR/object) per project.
|
||||
**Why:** Linking requires a well-defined intermediate representation per project.
|
||||
|
||||
### Scope
|
||||
|
||||
* Define `CompiledModule` (compiler output) containing:
|
||||
* Define `CompiledModule` (compiler output, **NOT** final VM blob):
|
||||
|
||||
* `module_name` (project name)
|
||||
* `exports` (functions/symbols)
|
||||
* `imports` (symbol refs by (dep-alias, symbol))
|
||||
* `const_pool` fragment
|
||||
* `code` fragment
|
||||
* `function_metas` fragment
|
||||
* This is **not** the final VM blob.
|
||||
* `project_id` — canonical project name
|
||||
* `target` — `main` or `test`
|
||||
* `exports` — exported symbols (`pub`) indexed by `(module_path, symbol_name, kind)`
|
||||
* `imports` — symbol references as:
|
||||
|
||||
* `(dep_alias, module_path, symbol_name)`
|
||||
* `const_pool` — constant pool fragment
|
||||
* `code` — bytecode fragment
|
||||
* `function_metas` — local function metadata fragment
|
||||
|
||||
* No linking or address patching occurs here.
|
||||
|
||||
### Deliverables
|
||||
|
||||
* `compile_project(step) -> Result<CompiledModule, CompileError>`
|
||||
* `compile_project(step: BuildStep) -> Result<CompiledModule, CompileError>`
|
||||
|
||||
### Tests
|
||||
|
||||
* compile root-only project to `CompiledModule`
|
||||
* compile root-only project into a valid `CompiledModule`
|
||||
|
||||
### Acceptance
|
||||
|
||||
* Compiler can produce a linkable unit per project.
|
||||
* Compiler can emit a deterministic, linkable object module per project.
|
||||
|
||||
---
|
||||
|
||||
## PR-15 — Link Orchestration v0 inside `prometeu_compiler`
|
||||
|
||||
**Why:** The compiler must produce the final closed-world blob.
|
||||
**Why:** The compiler must emit a single closed-world executable blob.
|
||||
|
||||
### Scope
|
||||
|
||||
* Move “link pipeline” responsibility to `prometeu_compiler`:
|
||||
* Move all link responsibilities to `prometeu_compiler`:
|
||||
|
||||
* Input: `Vec<CompiledModule>` in build order
|
||||
* Output: `ProgramImage` (single bytecode blob)
|
||||
* Define linker responsibilities (v0):
|
||||
* **Input:** `Vec<CompiledModule>` (in build-plan order)
|
||||
* **Output:** `ProgramImage` (single PBS v0 bytecode blob)
|
||||
|
||||
* Linker responsibilities (v0):
|
||||
|
||||
* resolve imports to exports across modules
|
||||
* validate symbol visibility (`pub` only)
|
||||
* assign final `FunctionTable` indices
|
||||
* patch CALL targets to `func_id`
|
||||
* merge const pools deterministically
|
||||
* emit the final PBS v0 module image
|
||||
* patch `CALL` opcodes to final `func_id`
|
||||
* merge constant pools deterministically
|
||||
* emit final PBS v0 image
|
||||
|
||||
### Deliverables
|
||||
|
||||
* `link(modules) -> Result<ProgramImage, LinkError>`
|
||||
* `LinkError`:
|
||||
* `LinkError` variants:
|
||||
|
||||
* unresolved import
|
||||
* duplicate export
|
||||
* incompatible symbol signatures (if available)
|
||||
* incompatible symbol signature (if available)
|
||||
|
||||
### Tests
|
||||
|
||||
* `archive-pbs/test01` becomes an integration test:
|
||||
* `archive-pbs/test01` as integration test:
|
||||
|
||||
* root depends on a lib
|
||||
* root calls into lib
|
||||
* output blob runs in VM
|
||||
* final blob runs successfully in VM
|
||||
|
||||
### Acceptance
|
||||
|
||||
* Compiler emits a single executable blob; VM only loads it.
|
||||
* Compiler emits a single executable blob; VM performs no linking.
|
||||
|
||||
---
|
||||
|
||||
## PR-16 — VM Boundary Cleanup: remove linker behavior from runtime
|
||||
|
||||
**Why:** Runtime should be dumb: no dependency resolution, no linking.
|
||||
**Why:** Runtime must be dumb and deterministic.
|
||||
|
||||
### Scope
|
||||
|
||||
* Audit `prometeu_core` + `prometeu_bytecode`:
|
||||
* Audit `prometeu_core` and `prometeu_bytecode`:
|
||||
|
||||
* VM loads PBS v0 module
|
||||
* VM verifies (optional) and executes
|
||||
* Remove/disable any linker-like logic in runtime:
|
||||
|
||||
* no search for func idx by address beyond function table
|
||||
* Remove or disable any linker-like behavior in runtime:
|
||||
|
||||
* no dependency resolution
|
||||
* no symbol lookup by name
|
||||
* no module graph assumptions
|
||||
|
||||
### Deliverables
|
||||
|
||||
* VM init uses:
|
||||
* VM init path uses:
|
||||
|
||||
* `BytecodeLoader::load()` => `(code, const_pool, functions)`
|
||||
* verifier as a gate
|
||||
* `BytecodeLoader::load()` → `(code, const_pool, functions)`
|
||||
* verifier as an execution gate
|
||||
|
||||
### Tests
|
||||
|
||||
* runtime loads compiler-produced blob
|
||||
* runtime loads and executes compiler-produced blob
|
||||
|
||||
### Acceptance
|
||||
|
||||
@ -165,18 +153,21 @@
|
||||
|
||||
---
|
||||
|
||||
## PR-17 — Diagnostics UX: show dependency graph + resolution trace
|
||||
## PR-17 — Diagnostics UX: dependency graph and resolution trace
|
||||
|
||||
**Why:** When deps fail, we need actionable feedback.
|
||||
**Why:** Dependency failures must be explainable.
|
||||
|
||||
### Scope
|
||||
|
||||
* Add CLI output (or compiler API output) showing:
|
||||
* Add compiler diagnostics output:
|
||||
|
||||
* resolved graph
|
||||
* alias mapping
|
||||
* where a conflict occurred
|
||||
* Add `--explain-deps` mode (or equivalent)
|
||||
* resolved dependency graph
|
||||
* alias → project mapping
|
||||
* explanation of conflicts or failures
|
||||
|
||||
* Add CLI/API flag:
|
||||
|
||||
* `--explain-deps`
|
||||
|
||||
### Deliverables
|
||||
|
||||
@ -184,15 +175,15 @@
|
||||
|
||||
### Tests
|
||||
|
||||
* snapshot tests for error messages (best-effort)
|
||||
* snapshot tests for diagnostics output (best-effort)
|
||||
|
||||
### Acceptance
|
||||
|
||||
* Users can debug dependency issues without guessing.
|
||||
* Users can debug dependency and linking issues without guesswork.
|
||||
|
||||
---
|
||||
|
||||
## Suggested execution order
|
||||
## Suggested Execution Order
|
||||
|
||||
1. PR-09 → PR-10 → PR-11
|
||||
2. PR-12 → PR-13
|
||||
@ -203,7 +194,7 @@
|
||||
|
||||
## Notes for Junie
|
||||
|
||||
* Keep all “v0” decisions simple and deterministic.
|
||||
* Favor explicit errors over silent fallback.
|
||||
* Keep all v0 decisions simple and deterministic.
|
||||
* Prefer explicit errors over silent fallback.
|
||||
* Treat `archive-pbs/test01` as the north-star integration scenario.
|
||||
* No background tasks: every PR must include tests proving the behavior.
|
||||
* No background work: every PR must include tests proving behavior.
|
||||
|
||||
@ -2,5 +2,5 @@
|
||||
"name": "canonical",
|
||||
"version": "0.1.0",
|
||||
"script_fe": "pbs",
|
||||
"entry": "src/main.pbs"
|
||||
"entry": "src/main/modules/main.pbs"
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
declare struct Color(raw: bounded)
|
||||
pub declare struct Color(raw: bounded)
|
||||
[[
|
||||
BLACK: Color(0b),
|
||||
WHITE: Color(65535b),
|
||||
@ -7,6 +7,6 @@ declare struct Color(raw: bounded)
|
||||
BLUE: Color(31b)
|
||||
]]
|
||||
|
||||
declare contract Gfx host {
|
||||
pub declare contract Gfx host {
|
||||
fn clear(color: Color): void;
|
||||
}
|
||||
@ -1,11 +1,11 @@
|
||||
declare struct ButtonState(
|
||||
pub declare struct ButtonState(
|
||||
pressed: bool,
|
||||
released: bool,
|
||||
down: bool,
|
||||
hold_frames: bounded
|
||||
)
|
||||
|
||||
declare struct Pad(
|
||||
pub declare struct Pad(
|
||||
up: ButtonState,
|
||||
down: ButtonState,
|
||||
left: ButtonState,
|
||||
@ -20,10 +20,6 @@ declare struct Pad(
|
||||
select: ButtonState
|
||||
)
|
||||
|
||||
declare contract Input host {
|
||||
pub declare contract Input host {
|
||||
fn pad(): Pad;
|
||||
}
|
||||
|
||||
fn add(a: int, b: int): int {
|
||||
return a + b;
|
||||
}
|
||||
@ -3,7 +3,7 @@
|
||||
"version": "0.1.0",
|
||||
"script_fe": "pbs",
|
||||
"kind": "app",
|
||||
"entry": "src/main.pbs",
|
||||
"entry": "src/main/modules/main.pbs",
|
||||
"out": "build/program.pbc",
|
||||
"dependencies": {
|
||||
"sdk": "../sdk"
|
||||
|
||||
@ -1,4 +1,9 @@
|
||||
import { Color, Gfx, Input } from "@test01:sdk";
|
||||
import { Color, Gfx } from "@sdk:gfx";
|
||||
import { Input } from "@sdk:input";
|
||||
|
||||
fn add(a: int, b: int): int {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
fn frame(): void {
|
||||
// 1. Locals & Arithmetic
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user