This commit is contained in:
bQUARKz 2026-02-05 15:52:49 +00:00
parent 0e76368cba
commit c12c1f7939
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
11 changed files with 289 additions and 488 deletions

View File

@ -1,2 +1,3 @@
pub mod symbols;
pub mod types;
pub mod types;
pub mod project_registry;

View File

@ -0,0 +1,70 @@
use std::collections::HashMap;
use prometeu_analysis::ids::ProjectId;
/// Stable, human-readable identity of a project: its manifest name plus version.
///
/// Used as the interning key in `ProjectRegistry`; interning the same
/// name/version pair twice yields the same `ProjectId`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct ProjectKey {
    /// Project name as declared in its manifest.
    pub name: String,
    /// Project version string (e.g. "1.0.0").
    pub version: String,
}
/// Metadata recorded for one interned project.
///
/// Stored densely in `ProjectRegistry::projects`, indexed by the numeric
/// value of `id`, so `id` always equals the entry's position in that table.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProjectMeta {
    /// The dense numeric id issued when the project was interned.
    pub id: ProjectId,
    /// Project name (mirrors the interned `ProjectKey::name`).
    pub name: String,
    /// Project version (mirrors the interned `ProjectKey::version`).
    pub version: String,
}
/// Interner mapping `ProjectKey`s (name + version) to dense numeric `ProjectId`s.
///
/// Ids are issued in insertion order starting at 0 and index directly into
/// `projects`, so lookups by id are O(1).
#[derive(Debug, Default, Clone)]
pub struct ProjectRegistry {
    // Reverse map: key -> already-issued id, used to deduplicate interning.
    by_name: HashMap<ProjectKey, ProjectId>,
    // Dense metadata table; entry i belongs to ProjectId(i).
    projects: Vec<ProjectMeta>,
}
impl ProjectRegistry {
    /// Creates an empty registry. Equivalent to `Self::default()`.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns the `ProjectId` for `key`, issuing a fresh id (the next dense
    /// index) the first time a key is seen.
    ///
    /// Ids are handed out in insertion order starting at 0, so an id always
    /// indexes directly into the internal metadata table.
    ///
    /// # Panics
    ///
    /// Panics if more than `u32::MAX` distinct projects are interned, instead
    /// of silently truncating the index (`as u32` would wrap and hand out
    /// colliding ids).
    pub fn intern(&mut self, key: &ProjectKey) -> ProjectId {
        if let Some(&id) = self.by_name.get(key) {
            return id;
        }
        // Checked narrowing: fail loudly rather than issue a duplicate id.
        let idx = u32::try_from(self.projects.len())
            .expect("project registry overflow: more than u32::MAX projects");
        let id = ProjectId(idx);
        self.by_name.insert(key.clone(), id);
        self.projects.push(ProjectMeta {
            id,
            name: key.name.clone(),
            version: key.version.clone(),
        });
        id
    }

    /// Looks up the metadata for a previously interned id, or `None` if the
    /// id was never issued by this registry.
    pub fn meta(&self, id: ProjectId) -> Option<&ProjectMeta> {
        self.projects.get(id.as_usize())
    }

    /// Reconstructs an owned `ProjectKey` (name + version) for an id, if known.
    pub fn key_of(&self, id: ProjectId) -> Option<ProjectKey> {
        self.meta(id).map(|m| ProjectKey {
            name: m.name.clone(),
            version: m.version.clone(),
        })
    }

    /// Number of distinct projects interned so far.
    pub fn len(&self) -> usize {
        self.projects.len()
    }

    /// True when no project has been interned yet.
    pub fn is_empty(&self) -> bool {
        self.projects.is_empty()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Interning must be idempotent per key and discriminate by version,
    /// and metadata lookups must round-trip the key's fields.
    #[test]
    fn project_registry_stable_ids_for_same_key() {
        let mut registry = ProjectRegistry::new();

        // Interning the same key twice yields the same id.
        let sdk_v1 = ProjectKey { name: "sdk".into(), version: "1.0.0".into() };
        let first = registry.intern(&sdk_v1);
        let second = registry.intern(&sdk_v1);
        assert_eq!(first, second);

        // A different version of the same project gets its own id.
        let sdk_v11 = ProjectKey { name: "sdk".into(), version: "1.1.0".into() };
        let third = registry.intern(&sdk_v11);
        assert_ne!(first, third);

        // Metadata lookup reflects the originally interned fields.
        let meta = registry.meta(first).expect("id was interned above");
        assert_eq!(meta.name, "sdk");
        assert_eq!(meta.version, "1.0.0");
    }
}

View File

@ -75,13 +75,13 @@ impl Linker {
let mut combined_pc_to_span = Vec::new();
let mut combined_function_names = Vec::new();
// 1. DebugSymbol resolution map: (ProjectId, module_path, symbol_name) -> func_idx in combined_functions
// 1. DebugSymbol resolution map: (ProjectKey, module_path, symbol_name) -> func_idx in combined_functions
let mut global_symbols = HashMap::new();
let mut module_code_offsets = Vec::with_capacity(modules.len());
let mut module_function_offsets = Vec::with_capacity(modules.len());
// Map ProjectId to index
// Map ProjectKey to index
let _project_to_idx: HashMap<_, _> = modules.iter().enumerate().map(|(i, m)| (m.project_id.clone(), i)).collect();
// PASS 1: Collect exports and calculate offsets
@ -279,16 +279,19 @@ mod tests {
use super::*;
use crate::building::output::{ExportKey, ExportMetadata, ImportKey, ImportMetadata};
use crate::building::plan::BuildTarget;
use crate::deps::resolver::ProjectId;
use crate::deps::resolver::ProjectKey;
use crate::semantics::export_surface::ExportSurfaceKind;
use prometeu_analysis::ids::ProjectId;
use prometeu_bytecode::opcode::OpCode;
use prometeu_bytecode::FunctionMeta;
use std::collections::BTreeMap;
#[test]
fn test_link_root_and_lib() {
let lib_id = ProjectId { name: "lib".into(), version: "1.0.0".into() };
let root_id = ProjectId { name: "root".into(), version: "1.0.0".into() };
let lib_key = ProjectKey { name: "lib".into(), version: "1.0.0".into() };
let root_key = ProjectKey { name: "root".into(), version: "1.0.0".into() };
let lib_id = ProjectId(0);
let root_id = ProjectId(1);
// Lib module: exports 'add'
let mut lib_code = Vec::new();
@ -303,7 +306,8 @@ mod tests {
}, ExportMetadata { func_idx: Some(0), is_host: false, ty: None });
let lib_module = CompiledModule {
project_id: lib_id.clone(),
project_id: lib_id,
project_key: lib_key.clone(),
target: BuildTarget::Main,
exports: lib_exports,
imports: vec![],
@ -340,7 +344,8 @@ mod tests {
}];
let root_module = CompiledModule {
project_id: root_id.clone(),
project_id: root_id,
project_key: root_key.clone(),
target: BuildTarget::Main,
exports: BTreeMap::new(),
imports: root_imports,
@ -356,18 +361,20 @@ mod tests {
};
let lib_step = BuildStep {
project_id: lib_id.clone(),
project_id: lib_id,
project_key: lib_key.clone(),
project_dir: "".into(),
target: BuildTarget::Main,
sources: vec![],
deps: BTreeMap::new(),
};
let mut root_deps = BTreeMap::new();
root_deps.insert("mylib".into(), lib_id.clone());
let mut root_deps: BTreeMap<String, ProjectId> = BTreeMap::new();
root_deps.insert("mylib".into(), lib_id);
let root_step = BuildStep {
project_id: root_id.clone(),
project_id: root_id,
project_key: root_key.clone(),
project_dir: "".into(),
target: BuildTarget::Main,
sources: vec![],
@ -391,11 +398,13 @@ mod tests {
#[test]
fn test_link_const_deduplication() {
let id = ProjectId { name: "test".into(), version: "1.0.0".into() };
let step = BuildStep { project_id: id.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };
let key = ProjectKey { name: "test".into(), version: "1.0.0".into() };
let id = ProjectId(0);
let step = BuildStep { project_id: id, project_key: key.clone(), project_dir: "".into(), target: BuildTarget::Main, sources: vec![], deps: BTreeMap::new() };
let m1 = CompiledModule {
project_id: id.clone(),
project_id: id,
project_key: key.clone(),
target: BuildTarget::Main,
exports: BTreeMap::new(),
imports: vec![],
@ -407,7 +416,8 @@ mod tests {
};
let m2 = CompiledModule {
project_id: id.clone(),
project_id: id,
project_key: key.clone(),
target: BuildTarget::Main,
exports: BTreeMap::new(),
imports: vec![],

View File

@ -77,7 +77,7 @@ pub fn build_from_graph(graph: &ResolvedGraph, target: BuildTarget) -> Result<Bu
}
all_project_symbols.push(crate::common::symbols::ProjectSymbols {
project: module.project_id.name.clone(),
project: module.project_key.name.clone(),
project_dir: project_dir.to_string_lossy().to_string(),
symbols: rel_symbols,
});

View File

@ -3,7 +3,8 @@ use crate::building::plan::{BuildStep, BuildTarget};
use crate::common::diagnostics::DiagnosticBundle;
use crate::common::files::FileManager;
use crate::common::spans::{FileId, Span};
use crate::deps::resolver::ProjectId;
use crate::deps::resolver::ProjectKey;
use prometeu_analysis::ids::ProjectId;
use crate::frontends::pbs::ast::ParsedAst;
use crate::frontends::pbs::collector::SymbolCollector;
use crate::frontends::pbs::lowering::Lowerer;
@ -49,6 +50,7 @@ pub struct ImportMetadata {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompiledModule {
pub project_id: ProjectId,
pub project_key: ProjectKey,
pub target: BuildTarget,
pub exports: BTreeMap<ExportKey, ExportMetadata>,
pub imports: Vec<ImportMetadata>,
@ -339,7 +341,7 @@ pub fn compile_project(
// 6. Collect symbols
let project_symbols = crate::common::symbols::collect_symbols(
&step.project_id.name,
&step.project_key.name,
&module_symbols_map,
file_manager,
&interner,
@ -374,6 +376,7 @@ pub fn compile_project(
Ok(CompiledModule {
project_id: step.project_id,
project_key: step.project_key,
target: step.target,
exports,
imports,
@ -413,9 +416,11 @@ mod tests {
fs::write(project_dir.join("src/main/modules/main.pbs"), main_code).unwrap();
let project_id = ProjectId { name: "root".to_string(), version: "0.1.0".to_string() };
let project_key = ProjectKey { name: "root".to_string(), version: "0.1.0".to_string() };
let project_id = ProjectId(0);
let step = BuildStep {
project_id: project_id.clone(),
project_id,
project_key: project_key.clone(),
project_dir: project_dir.clone(),
target: BuildTarget::Main,
sources: vec![PathBuf::from("src/main/modules/main.pbs")],

View File

@ -1,4 +1,5 @@
use crate::deps::resolver::{ProjectId, ResolvedGraph};
use crate::deps::resolver::{ProjectKey, ResolvedGraph};
use prometeu_analysis::ids::ProjectId;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use std::path::PathBuf;
@ -13,6 +14,7 @@ pub enum BuildTarget {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildStep {
pub project_id: ProjectId,
pub project_key: ProjectKey,
pub project_dir: PathBuf,
pub target: BuildTarget,
pub sources: Vec<PathBuf>,
@ -47,15 +49,16 @@ impl BuildPlan {
.collect();
sources.sort();
let mut deps = BTreeMap::new();
let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
if let Some(edges) = graph.edges.get(&id) {
for edge in edges {
deps.insert(edge.alias.clone(), edge.to.clone());
deps.insert(edge.alias.clone(), edge.to);
}
}
steps.push(BuildStep {
project_id: id.clone(),
project_id: id,
project_key: node.key.clone(),
project_dir: node.path.clone(),
target,
sources,
@ -89,19 +92,19 @@ fn topological_sort(graph: &ResolvedGraph) -> Vec<ProjectId> {
let mut ready: std::collections::BinaryHeap<ReverseProjectId> = graph.nodes.keys()
.filter(|id| *in_degree.get(id).unwrap() == 0)
.map(|id| ReverseProjectId(id.clone()))
.map(|id| ReverseProjectId(*id))
.collect();
let mut result = Vec::new();
while let Some(ReverseProjectId(u)) = ready.pop() {
result.push(u.clone());
result.push(u);
if let Some(neighbors) = adj.get(&u) {
for v in neighbors {
let degree = in_degree.get_mut(v).unwrap();
*degree -= 1;
if *degree == 0 {
ready.push(ReverseProjectId(v.clone()));
ready.push(ReverseProjectId(*v));
}
}
}
@ -110,15 +113,14 @@ fn topological_sort(graph: &ResolvedGraph) -> Vec<ProjectId> {
result
}
#[derive(Eq, PartialEq)]
#[derive(Eq, PartialEq, Copy, Clone)]
struct ReverseProjectId(ProjectId);
impl Ord for ReverseProjectId {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
// BinaryHeap is a max-heap. We want min-heap for lexicographic order.
// So we reverse the comparison.
other.0.name.cmp(&self.0.name)
.then(other.0.version.cmp(&self.0.version))
// BinaryHeap is a max-heap. We want min-heap with stable numeric order.
// So we reverse the comparison on the numeric id.
other.0.as_u32().cmp(&self.0.as_u32())
}
}
@ -131,14 +133,15 @@ impl PartialOrd for ReverseProjectId {
#[cfg(test)]
mod tests {
use super::*;
use crate::deps::resolver::{ProjectId, ResolvedEdge, ResolvedGraph, ResolvedNode};
use crate::deps::resolver::{ProjectKey, ResolvedEdge, ResolvedGraph, ResolvedNode};
use crate::manifest::Manifest;
use crate::sources::ProjectSources;
use std::collections::BTreeMap;
fn mock_node(name: &str, version: &str) -> ResolvedNode {
ResolvedNode {
id: ProjectId { name: name.to_string(), version: version.to_string() },
fn mock_node(id: ProjectId, name: &str, version: &str) -> (ProjectId, ResolvedNode) {
let node = ResolvedNode {
id,
key: ProjectKey { name: name.to_string(), version: version.to_string() },
path: PathBuf::from(format!("/{}", name)),
manifest: Manifest {
name: name.to_string(),
@ -151,96 +154,95 @@ mod tests {
files: vec![PathBuf::from("b.pbs"), PathBuf::from("a.pbs")],
test_files: vec![PathBuf::from("test_b.pbs"), PathBuf::from("test_a.pbs")],
},
}
};
(id, node)
}
#[test]
fn test_topo_sort_stability() {
let mut graph = ResolvedGraph::default();
let a = mock_node("a", "1.0.0");
let b = mock_node("b", "1.0.0");
let c = mock_node("c", "1.0.0");
graph.nodes.insert(a.id.clone(), a);
graph.nodes.insert(b.id.clone(), b);
graph.nodes.insert(c.id.clone(), c);
// No edges, should be alphabetical: a, b, c
let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");
graph.nodes.insert(a_id, a);
graph.nodes.insert(b_id, b);
graph.nodes.insert(c_id, c);
// No edges, order by numeric id: a(0), b(1), c(2)
let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
assert_eq!(plan.steps[0].project_id.name, "a");
assert_eq!(plan.steps[1].project_id.name, "b");
assert_eq!(plan.steps[2].project_id.name, "c");
assert_eq!(plan.steps[0].project_key.name, "a");
assert_eq!(plan.steps[1].project_key.name, "b");
assert_eq!(plan.steps[2].project_key.name, "c");
}
#[test]
fn test_topo_sort_dependencies() {
let mut graph = ResolvedGraph::default();
let a = mock_node("a", "1.0.0");
let b = mock_node("b", "1.0.0");
let c = mock_node("c", "1.0.0");
graph.nodes.insert(a.id.clone(), a.clone());
graph.nodes.insert(b.id.clone(), b.clone());
graph.nodes.insert(c.id.clone(), c.clone());
let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");
graph.nodes.insert(a_id, a.clone());
graph.nodes.insert(b_id, b.clone());
graph.nodes.insert(c_id, c.clone());
// c depends on b, b depends on a
// Sort should be: a, b, c
graph.edges.insert(c.id.clone(), vec![ResolvedEdge { alias: "b_alias".to_string(), to: b.id.clone() }]);
graph.edges.insert(b.id.clone(), vec![ResolvedEdge { alias: "a_alias".to_string(), to: a.id.clone() }]);
graph.edges.insert(c_id, vec![ResolvedEdge { alias: "b_alias".to_string(), to: b_id }]);
graph.edges.insert(b_id, vec![ResolvedEdge { alias: "a_alias".to_string(), to: a_id }]);
let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
assert_eq!(plan.steps.len(), 3);
assert_eq!(plan.steps[0].project_id.name, "a");
assert_eq!(plan.steps[1].project_id.name, "b");
assert_eq!(plan.steps[2].project_id.name, "c");
assert_eq!(plan.steps[2].deps.get("b_alias").unwrap(), &b.id);
assert_eq!(plan.steps[0].project_key.name, "a");
assert_eq!(plan.steps[1].project_key.name, "b");
assert_eq!(plan.steps[2].project_key.name, "c");
assert_eq!(plan.steps[2].deps.get("b_alias").copied(), Some(b_id));
}
#[test]
fn test_topo_sort_complex() {
let mut graph = ResolvedGraph::default();
// d -> b, c
// b -> a
// c -> a
// a
// Valid sorts: a, b, c, d OR a, c, b, d
// Lexicographic rule says b before c. So a, b, c, d.
let a = mock_node("a", "1.0.0");
let b = mock_node("b", "1.0.0");
let c = mock_node("c", "1.0.0");
let d = mock_node("d", "1.0.0");
graph.nodes.insert(a.id.clone(), a.clone());
graph.nodes.insert(b.id.clone(), b.clone());
graph.nodes.insert(c.id.clone(), c.clone());
graph.nodes.insert(d.id.clone(), d.clone());
graph.edges.insert(d.id.clone(), vec![
ResolvedEdge { alias: "b".to_string(), to: b.id.clone() },
ResolvedEdge { alias: "c".to_string(), to: c.id.clone() },
let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
let (b_id, b) = mock_node(ProjectId(1), "b", "1.0.0");
let (c_id, c) = mock_node(ProjectId(2), "c", "1.0.0");
let (d_id, d) = mock_node(ProjectId(3), "d", "1.0.0");
graph.nodes.insert(a_id, a.clone());
graph.nodes.insert(b_id, b.clone());
graph.nodes.insert(c_id, c.clone());
graph.nodes.insert(d_id, d.clone());
graph.edges.insert(d_id, vec![
ResolvedEdge { alias: "b".to_string(), to: b_id },
ResolvedEdge { alias: "c".to_string(), to: c_id },
]);
graph.edges.insert(b.id.clone(), vec![ResolvedEdge { alias: "a".to_string(), to: a.id.clone() }]);
graph.edges.insert(c.id.clone(), vec![ResolvedEdge { alias: "a".to_string(), to: a.id.clone() }]);
graph.edges.insert(b_id, vec![ResolvedEdge { alias: "a".to_string(), to: a_id }]);
graph.edges.insert(c_id, vec![ResolvedEdge { alias: "a".to_string(), to: a_id }]);
let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
let names: Vec<_> = plan.steps.iter().map(|s| s.project_id.name.as_str()).collect();
let names: Vec<_> = plan.steps.iter().map(|s| s.project_key.name.as_str()).collect();
assert_eq!(names, vec!["a", "b", "c", "d"]);
}
#[test]
fn test_sources_sorting() {
let mut graph = ResolvedGraph::default();
let a = mock_node("a", "1.0.0");
graph.nodes.insert(a.id.clone(), a);
let (a_id, a) = mock_node(ProjectId(0), "a", "1.0.0");
graph.nodes.insert(a_id, a);
let plan = BuildPlan::from_graph(&graph, BuildTarget::Main);
assert_eq!(plan.steps[0].sources, vec![PathBuf::from("a.pbs"), PathBuf::from("b.pbs")]);
let plan_test = BuildPlan::from_graph(&graph, BuildTarget::Test);
assert_eq!(plan_test.steps[0].sources, vec![PathBuf::from("test_a.pbs"), PathBuf::from("test_b.pbs")]);
}

View File

@ -1,19 +1,19 @@
use crate::analysis::project_registry::ProjectRegistry;
use crate::deps::fetch::{fetch_dependency, FetchError};
use crate::manifest::{load_manifest, Manifest};
use crate::sources::{discover, ProjectSources, SourceError};
use prometeu_analysis::ids::ProjectId;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ProjectId {
pub name: String,
pub version: String,
}
// Re-export for external modules/tests that previously imported from here
pub use crate::analysis::project_registry::ProjectKey;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResolvedNode {
pub id: ProjectId,
pub key: ProjectKey,
pub path: PathBuf,
pub manifest: Manifest,
pub sources: ProjectSources,
@ -32,11 +32,11 @@ pub enum ResolutionStep {
spec: String,
},
Resolved {
project_id: ProjectId,
project_id: ProjectKey,
path: PathBuf,
},
UsingCached {
project_id: ProjectId,
project_id: ProjectKey,
},
Conflict {
name: String,
@ -56,9 +56,11 @@ pub struct ResolutionTrace {
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ResolvedGraph {
pub nodes: HashMap<ProjectId, ResolvedNode>,
pub edges: HashMap<ProjectId, Vec<ResolvedEdge>>,
pub edges: HashMap<ProjectId, Vec<ResolvedEdge>>,
pub root_id: Option<ProjectId>,
pub trace: ResolutionTrace,
#[serde(skip)]
pub registry: ProjectRegistry,
}
impl ResolvedGraph {
@ -115,7 +117,9 @@ impl ResolvedGraph {
if let Some(root_id) = &self.root_id {
out.push_str("\n--- Resolved Dependency Graph ---\n");
let mut visited = HashSet::new();
out.push_str(&format!("{} v{}\n", root_id.name, root_id.version));
if let Some(meta) = self.registry.meta(*root_id) {
out.push_str(&format!("{} v{}\n", meta.name, meta.version));
}
self.print_node(root_id, 0, &mut out, &mut visited);
}
@ -126,9 +130,11 @@ impl ResolvedGraph {
if let Some(edges) = self.edges.get(id) {
for edge in edges {
let prefix = " ".repeat(indent);
out.push_str(&format!("{}└── {}: {} v{}\n", prefix, edge.alias, edge.to.name, edge.to.version));
if let Some(meta) = self.registry.meta(edge.to) {
out.push_str(&format!("{}└── {}: {} v{}\n", prefix, edge.alias, meta.name, meta.version));
}
if !visited.contains(&edge.to) {
visited.insert(edge.to.clone());
visited.insert(edge.to);
self.print_node(&edge.to, indent + 1, out, visited);
}
}
@ -212,8 +218,8 @@ impl From<SourceError> for ResolveError {
pub fn resolve_graph(root_dir: &Path) -> Result<ResolvedGraph, ResolveError> {
let mut graph = ResolvedGraph::default();
let mut visited = HashSet::new();
let mut stack = Vec::new();
let mut visited: HashSet<ProjectId> = HashSet::new();
let mut stack: Vec<ProjectId> = Vec::new();
let root_path = root_dir.canonicalize().map_err(|e| ResolveError::IoError {
path: root_dir.to_path_buf(),
@ -241,38 +247,37 @@ fn resolve_recursive(
) -> Result<ProjectId, ResolveError> {
let manifest = load_manifest(project_path)?;
let sources = discover(project_path)?;
let project_id = ProjectId {
name: manifest.name.clone(),
version: manifest.version.clone(),
};
let project_key = ProjectKey { name: manifest.name.clone(), version: manifest.version.clone() };
let project_id = graph.registry.intern(&project_key);
// Cycle detection
if let Some(pos) = stack.iter().position(|id| id == &project_id) {
let mut chain: Vec<String> = stack[pos..].iter().map(|id| id.name.clone()).collect();
chain.push(project_id.name.clone());
let mut chain: Vec<String> = stack[pos..]
.iter()
.map(|id| graph.registry.meta(*id).map(|m| m.name.clone()).unwrap_or_else(|| format!("#{}", id.0)))
.collect();
chain.push(project_key.name.clone());
return Err(ResolveError::CycleDetected(chain));
}
// Collision handling: Name collision
// If we find a project with the same name but different path/version, we might have a collision or version conflict.
// Collision handling: Name collision / Version conflict
for node in graph.nodes.values() {
if node.id.name == project_id.name {
if node.id.version != project_id.version {
if node.key.name == project_key.name {
if node.key.version != project_key.version {
graph.trace.steps.push(ResolutionStep::Conflict {
name: project_id.name.clone(),
existing_version: node.id.version.clone(),
new_version: project_id.version.clone(),
name: project_key.name.clone(),
existing_version: node.key.version.clone(),
new_version: project_key.version.clone(),
});
return Err(ResolveError::VersionConflict {
name: project_id.name.clone(),
v1: node.id.version.clone(),
v2: project_id.version.clone(),
name: project_key.name.clone(),
v1: node.key.version.clone(),
v2: project_key.version.clone(),
});
}
// Same name, same version, but different path?
if node.path != project_path {
return Err(ResolveError::NameCollision {
name: project_id.name.clone(),
name: project_key.name.clone(),
p1: node.path.clone(),
p2: project_path.to_path_buf(),
});
@ -282,60 +287,38 @@ fn resolve_recursive(
// If already fully visited, return the ID
if visited.contains(&project_id) {
graph.trace.steps.push(ResolutionStep::UsingCached {
project_id: project_id.clone(),
});
graph.trace.steps.push(ResolutionStep::UsingCached { project_id: project_key.clone() });
return Ok(project_id);
}
graph.trace.steps.push(ResolutionStep::Resolved {
project_id: project_id.clone(),
path: project_path.to_path_buf(),
});
graph.trace.steps.push(ResolutionStep::Resolved { project_id: project_key.clone(), path: project_path.to_path_buf() });
visited.insert(project_id.clone());
stack.push(project_id.clone());
visited.insert(project_id);
stack.push(project_id);
let mut edges = Vec::new();
for (alias, spec) in &manifest.dependencies {
graph.trace.steps.push(ResolutionStep::TryResolve {
alias: alias.clone(),
spec: format!("{:?}", spec),
});
graph.trace.steps.push(ResolutionStep::TryResolve { alias: alias.clone(), spec: format!("{:?}", spec) });
let dep_path = match fetch_dependency(alias, spec, project_path, root_project_dir) {
Ok(p) => p,
Err(e) => {
graph.trace.steps.push(ResolutionStep::Error {
message: format!("Fetch error for '{}': {}", alias, e),
});
graph.trace.steps.push(ResolutionStep::Error { message: format!("Fetch error for '{}': {}", alias, e) });
return Err(e.into());
}
};
let dep_id = match resolve_recursive(&dep_path, root_project_dir, graph, visited, stack) {
Ok(id) => id,
Err(e) => {
// If it's a version conflict, we already pushed it inside the recursive call
// but let's make sure we catch other errors too.
return Err(e);
}
Err(e) => return Err(e),
};
edges.push(ResolvedEdge {
alias: alias.clone(),
to: dep_id,
});
edges.push(ResolvedEdge { alias: alias.clone(), to: dep_id });
}
stack.pop();
graph.nodes.insert(project_id.clone(), ResolvedNode {
id: project_id.clone(),
path: project_path.to_path_buf(),
manifest,
sources,
});
graph.edges.insert(project_id.clone(), edges);
graph.nodes.insert(project_id, ResolvedNode { id: project_id, key: project_key, path: project_path.to_path_buf(), manifest, sources });
graph.edges.insert(project_id, edges);
Ok(project_id)
}
@ -370,12 +353,14 @@ mod tests {
let graph = resolve_graph(&root).unwrap();
assert_eq!(graph.nodes.len(), 2);
let root_id = graph.root_id.as_ref().unwrap();
assert_eq!(root_id.name, "root");
let root_meta = graph.registry.meta(*root_id).unwrap();
assert_eq!(root_meta.name, "root");
let edges = graph.edges.get(root_id).unwrap();
assert_eq!(edges.len(), 1);
assert_eq!(edges[0].alias, "d");
assert_eq!(edges[0].to.name, "dep");
let dep_meta = graph.registry.meta(edges[0].to).unwrap();
assert_eq!(dep_meta.name, "dep");
}
#[test]
@ -438,7 +423,8 @@ mod tests {
let root_id = graph.root_id.as_ref().unwrap();
let edges = graph.edges.get(root_id).unwrap();
assert_eq!(edges[0].alias, "my_alias");
assert_eq!(edges[0].to.name, "actual_name");
let dep_meta = graph.registry.meta(edges[0].to).unwrap();
assert_eq!(dep_meta.name, "actual_name");
assert!(graph.nodes.contains_key(&edges[0].to));
}
@ -593,8 +579,8 @@ mod tests {
if let Ok(graph) = graph {
assert_eq!(graph.nodes.len(), 2);
let rem_id = graph.nodes.values().find(|n| n.id.name == "remote").unwrap().id.clone();
assert_eq!(rem_id.version, "1.2.3");
let rem_node = graph.nodes.values().find(|n| n.key.name == "remote").unwrap();
assert_eq!(rem_node.key.version, "1.2.3");
// Verify cache manifest was created
assert!(root.join("cache/cache.json").exists());

View File

@ -2,8 +2,9 @@ use prometeu_compiler::building::output::CompiledModule;
use prometeu_compiler::building::output::{compile_project, CompileError, ExportKey, ExportMetadata};
use prometeu_compiler::building::plan::{BuildStep, BuildTarget};
use prometeu_compiler::common::files::FileManager;
use prometeu_compiler::deps::resolver::ProjectId;
use prometeu_compiler::deps::resolver::ProjectKey;
use prometeu_compiler::semantics::export_surface::ExportSurfaceKind;
use prometeu_analysis::ids::ProjectId;
use std::collections::{BTreeMap, HashMap};
use std::path::PathBuf;
use tempfile::tempdir;
@ -16,7 +17,8 @@ fn test_local_vs_dependency_conflict() {
let project_dir = dir.path().to_path_buf();
// Dependency: sdk
let dep_id = ProjectId { name: "sdk-impl".to_string(), version: "1.0.0".to_string() };
let dep_key = ProjectKey { name: "sdk-impl".to_string(), version: "1.0.0".to_string() };
let dep_id = ProjectId(0);
let mut dep_exports = BTreeMap::new();
dep_exports.insert(ExportKey {
module_path: "math".to_string(), // normalized path
@ -29,7 +31,8 @@ fn test_local_vs_dependency_conflict() {
});
let dep_module = CompiledModule {
project_id: dep_id.clone(),
project_id: dep_id,
project_key: dep_key.clone(),
target: BuildTarget::Main,
exports: dep_exports,
imports: vec![],
@ -40,20 +43,22 @@ fn test_local_vs_dependency_conflict() {
symbols: vec![],
};
let mut dep_modules = HashMap::new();
dep_modules.insert(dep_id.clone(), dep_module);
let mut dep_modules: HashMap<ProjectId, CompiledModule> = HashMap::new();
dep_modules.insert(dep_id, dep_module);
// Main project has a LOCAL module named "sdk/math"
// By creating a file in src/main/modules/sdk/math/, the module path becomes "sdk/math"
fs::create_dir_all(project_dir.join("src/main/modules/sdk/math")).unwrap();
fs::write(project_dir.join("src/main/modules/sdk/math/local.pbs"), "pub declare struct Vector(x: int)").unwrap();
let main_id = ProjectId { name: "main".to_string(), version: "0.1.0".to_string() };
let mut deps = BTreeMap::new();
deps.insert("sdk".to_string(), dep_id.clone());
let main_key = ProjectKey { name: "main".to_string(), version: "0.1.0".to_string() };
let main_id = ProjectId(1);
let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
deps.insert("sdk".to_string(), ProjectId(0));
let step = BuildStep {
project_id: main_id,
project_key: main_key,
project_dir,
target: BuildTarget::Main,
sources: vec![PathBuf::from("src/main/modules/sdk/math/local.pbs")],
@ -77,7 +82,8 @@ fn test_aliased_dependency_conflict() {
let project_dir = dir.path().to_path_buf();
// Dependency 1: exports "b/c:Vector"
let dep1_id = ProjectId { name: "p1".to_string(), version: "1.0.0".to_string() };
let dep1_key = ProjectKey { name: "p1".to_string(), version: "1.0.0".to_string() };
let dep1_id = ProjectId(0);
let mut dep1_exports = BTreeMap::new();
dep1_exports.insert(ExportKey {
module_path: "b/c".to_string(),
@ -89,7 +95,8 @@ fn test_aliased_dependency_conflict() {
ty: None,
});
let dep1_module = CompiledModule {
project_id: dep1_id.clone(),
project_id: dep1_id,
project_key: dep1_key.clone(),
target: BuildTarget::Main,
exports: dep1_exports,
imports: vec![],
@ -101,7 +108,8 @@ fn test_aliased_dependency_conflict() {
};
// Dependency 2: exports "c:Vector"
let dep2_id = ProjectId { name: "p2".to_string(), version: "1.0.0".to_string() };
let dep2_key = ProjectKey { name: "p2".to_string(), version: "1.0.0".to_string() };
let dep2_id = ProjectId(1);
let mut dep2_exports = BTreeMap::new();
dep2_exports.insert(ExportKey {
module_path: "c".to_string(),
@ -113,7 +121,8 @@ fn test_aliased_dependency_conflict() {
ty: None,
});
let dep2_module = CompiledModule {
project_id: dep2_id.clone(),
project_id: dep2_id,
project_key: dep2_key.clone(),
target: BuildTarget::Main,
exports: dep2_exports,
imports: vec![],
@ -124,17 +133,19 @@ fn test_aliased_dependency_conflict() {
symbols: vec![],
};
let mut dep_modules = HashMap::new();
dep_modules.insert(dep1_id.clone(), dep1_module);
dep_modules.insert(dep2_id.clone(), dep2_module);
let mut dep_modules: HashMap<ProjectId, CompiledModule> = HashMap::new();
dep_modules.insert(dep1_id, dep1_module);
dep_modules.insert(dep2_id, dep2_module);
let main_id = ProjectId { name: "main".to_string(), version: "0.1.0".to_string() };
let mut deps = BTreeMap::new();
deps.insert("a".to_string(), dep1_id.clone());
deps.insert("a/b".to_string(), dep2_id.clone());
let main_key = ProjectKey { name: "main".to_string(), version: "0.1.0".to_string() };
let main_id = ProjectId(2);
let mut deps: BTreeMap<String, ProjectId> = BTreeMap::new();
deps.insert("a".to_string(), ProjectId(0));
deps.insert("a/b".to_string(), ProjectId(1));
let step = BuildStep {
project_id: main_id,
project_key: main_key,
project_dir,
target: BuildTarget::Main,
sources: vec![],
@ -163,9 +174,11 @@ fn test_mixed_main_test_modules() {
fs::create_dir_all(project_dir.join("src/test/modules/foo")).unwrap();
fs::write(project_dir.join("src/test/modules/foo/Test.pbs"), "pub declare struct Test(x: int)").unwrap();
let project_id = ProjectId { name: "mixed".to_string(), version: "0.1.0".to_string() };
let project_key = ProjectKey { name: "mixed".to_string(), version: "0.1.0".to_string() };
let project_id = ProjectId(0);
let step = BuildStep {
project_id,
project_key,
project_dir,
target: BuildTarget::Main,
sources: vec![
@ -192,9 +205,11 @@ fn test_module_merging_same_directory() {
fs::write(project_dir.join("src/main/modules/gfx/api.pbs"), "pub declare struct Gfx(id: int)").unwrap();
fs::write(project_dir.join("src/main/modules/gfx/colors.pbs"), "pub declare struct Color(r: int)").unwrap();
let project_id = ProjectId { name: "merge".to_string(), version: "0.1.0".to_string() };
let project_key = ProjectKey { name: "merge".to_string(), version: "0.1.0".to_string() };
let project_id = ProjectId(0);
let step = BuildStep {
project_id,
project_key,
project_dir,
target: BuildTarget::Main,
sources: vec![
@ -221,9 +236,11 @@ fn test_duplicate_symbol_in_same_module_different_files() {
fs::write(project_dir.join("src/main/modules/gfx/a.pbs"), "pub declare struct Gfx(id: int)").unwrap();
fs::write(project_dir.join("src/main/modules/gfx/b.pbs"), "pub declare struct Gfx(id: int)").unwrap();
let project_id = ProjectId { name: "dup".to_string(), version: "0.1.0".to_string() };
let project_key = ProjectKey { name: "dup".to_string(), version: "0.1.0".to_string() };
let project_id = ProjectId(0);
let step = BuildStep {
project_id,
project_key,
project_dir,
target: BuildTarget::Main,
sources: vec![
@ -248,9 +265,11 @@ fn test_root_module_merging() {
fs::write(project_dir.join("src/main/modules/main.pbs"), "pub declare struct Main(id: int)").unwrap();
fs::write(project_dir.join("src/main/modules/utils.pbs"), "pub declare struct Utils(id: int)").unwrap();
let project_id = ProjectId { name: "root-merge".to_string(), version: "0.1.0".to_string() };
let project_key = ProjectKey { name: "root-merge".to_string(), version: "0.1.0".to_string() };
let project_id = ProjectId(0);
let step = BuildStep {
project_id,
project_key,
project_dir,
target: BuildTarget::Main,
sources: vec![

View File

@ -3,10 +3,13 @@ use tokio::sync::RwLock;
use tokio_util::sync::CancellationToken;
use prometeu_analysis::FileDB;
use prometeu_analysis::ids::{FileId, ProjectId};
use std::collections::HashMap;
#[derive(Default)]
pub struct AnalysisDb {
pub file_db: FileDB,
pub file_to_project: HashMap<FileId, ProjectId>,
// Os campos abaixo serão conectados conforme PR-03/04/05 (podem começar como None)
// pub ast: Option<AstArena>,
@ -22,3 +25,9 @@ pub struct AnalysisDb {
}
pub type SharedDb = Arc<RwLock<AnalysisDb>>;
impl AnalysisDb {
    /// Resolve the owning project for `file`.
    ///
    /// Returns `None` when no mapping for `file` has been recorded in
    /// `file_to_project`.
    pub fn project_for_file(&self, file: FileId) -> Option<ProjectId> {
        let mapping = self.file_to_project.get(&file);
        mapping.copied()
    }
}

View File

@ -1,301 +0,0 @@
## PR-R1 — IDs padronizados (newtypes) em um único lugar
**Branch:** `pr-r1-ids-newtypes`
### Briefing
Hoje existem IDs espalhados entre crates (`FileId`, `NameId`, `NodeId`, `SymbolId`, `TypeId`) e alguns campos ainda usam `u32`/`usize` cru (ex.: `Symbol.module: u32`). Para LSP, precisamos de IDs consistentes para indexação, caches, spans e cross-crate APIs.
### Alvo
Centralizar e padronizar os seguintes IDs (newtypes):
* `FileId(u32)`
* `NodeId(u32)`
* `NameId(u32)`
* `SymbolId(u32)`
* `TypeId(u32)`
* `ModuleId(u32)`
* `ProjectId(u32)` *(ver PR-R4 para adoção total; aqui é apenas definição + plumbing mínimo se necessário)*
**Definição única** em `prometeu-analysis` (ou um crate novo `prometeu-ids`, se você preferir isolar):
* Arquivo sugerido: `crates/prometeu-analysis/src/ids.rs`
* Exportar via `pub mod ids; pub use ids::*;`
### Escopo / Mudanças
1. **Criar o módulo de IDs** com:
* `#[repr(transparent)] pub struct FileId(pub u32);` etc.
* `Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Debug`.
* Helpers:
* `impl FileId { pub const INVALID: FileId = FileId(u32::MAX); }` (opcional)
* `impl From<u32> for FileId` e `From<FileId> for u32`.
2. **Padronizar uso cross-crate**:
* `prometeu-compiler/frontends/pbs/ast`: trocar `NodeId` local para `prometeu_analysis::NodeId`.
* `prometeu-compiler/analysis/symbols`: trocar `SymbolId` local para `prometeu_analysis::SymbolId`.
* `prometeu-compiler/analysis/types`: trocar `TypeId` local para `prometeu_analysis::TypeId`.
* Onde houver `usize`/`u32` cru representando file/module/symbol/type/node: substituir.
3. **Trocar `Symbol.module: u32` → `ModuleId`**.
4. **Interner (`NameId`)**:
* Garantir que o interner existente retorna `NameId` do módulo unificado.
* Se existirem `NameId` duplicados em crates diferentes, remover e apontar para o único.
### Regras de compatibilidade (para não quebrar tudo de uma vez)
* Se algum ponto ainda depende de `usize`, oferecer funções auxiliares **temporárias**:
* `fn as_usize(self) -> usize` (somente se realmente necessário)
* Preferir converter na borda (ex.: índices de `Vec`).
### Testes de aceite
* `cargo test -q` no workspace.
* Teste unitário novo em `prometeu-analysis`:
* `ids_are_repr_transparent_and_hashable()` (checa `size_of::<FileId>() == 4` etc.).
* Teste de compilação indireto: build de `prometeu-compiler` sem warnings de tipos duplicados.
### Notas de implementação
* Evitar circular dependency: `prometeu-analysis` deve ser “baixo nível”. Se o compiler já depende dele, ok.
* Se `prometeu-analysis` não puder depender do compiler (não deve), manter IDs neutros e reutilizáveis.
---
## PR-R2 — Span unificado + FileId consistente em todo pipeline
**Branch:** `pr-r2-span-unify`
### Briefing
Hoje existem dois tipos de `Span`:
* `prometeu-analysis::Span` (com `FileId`)
* `prometeu-compiler::common::spans::Span` (com `file_id: usize`)
Para LSP, diagnostics/definition/symbols precisam de um único modelo de span para conversão consistente para `Location/Range`.
A spec aponta spans como **byte offsets**, `end` exclusivo, e file id deve ser estável. (PBS Implementation Spec / Diagnostic specs)
### Alvo
* Tornar `prometeu-analysis::Span` o **span canônico** do projeto.
* Remover/aposentar `prometeu-compiler::common::spans::Span`.
* Garantir que **todo span carregue `FileId`**, e não `usize`.
### Escopo / Mudanças
1. **Definir `Span` canônico** (se já existe, reforçar):
* `pub struct Span { pub file: FileId, pub start: u32, pub end: u32 }`
* `start/end` em bytes (u32), `end` exclusivo.
* Helpers:
* `Span::new(file, start, end)`
* `Span::len()`
* `Span::contains(byte)`
2. **Migrar compiler para usar Span canônico**:
* Parser: todos os nós AST devem carregar spans canônicos.
* Diagnostics: `Diagnostic.span` deve ser canônico.
* Resolver/Symbols: `Symbol.decl_span` deve ser canônico.
* RefIndex: deve usar `Span` canônico.
3. **Matar o `file_id: usize`**:
* Onde havia `usize`, trocar por `FileId`.
* Nas arenas indexadas por `Vec`, converter no ponto de acesso: `file.0 as usize`.
4. **Adapters temporários (se necessário)**
* Se houver muitos pontos que esperam o Span antigo, criar `type OldSpan = Span` por 1 PR (somente dentro do compiler), e remover no fim da PR.
### Testes de aceite
* `cargo test -q` no workspace.
* Teste novo:
* `span_end_is_exclusive()`
* `diagnostic_span_is_valid_for_file()` (valida `end>=start` e `end<=text.len()` em um fixture simples).
### Critérios de “done”
* Não existe mais `prometeu-compiler::common::spans::Span` (ou está `deprecated` e sem uso).
* Qualquer `Span` do pipeline é `prometeu-analysis::Span`.
---
## PR-R3 — TextIndex/LineIndex correto para LSP (UTF-16) + conversões
**Branch:** `pr-r3-text-index-utf16`
### Briefing
O LSP usa `Position.character` em **UTF-16 code units** (não bytes). Hoje o `LineIndex` calcula coluna como *byte offset* na linha. Em arquivos com caracteres fora do ASCII (acentos, emoji), diagnostics e goto definition ficam desalinhados.
Queremos:
* Manter o core do compilador em **byte offsets** (spec).
* Converter **somente na borda** (LSP e ferramentas).
### Alvo
Criar um índice de texto (por arquivo) que suporte:
* `byte_offset -> (line, utf16_col)`
* `(line, utf16_col) -> byte_offset`
E manter:
* `Span` em bytes.
* O índice baseado no **conteúdo atual** do arquivo.
### Escopo / Mudanças
1. Introduzir `TextIndex` em `prometeu-analysis` (ou `prometeu-lsp` se você quiser limitar ao LSP; mas recomendo em `analysis` pois será útil para debug map e tooling):
* Arquivo sugerido: `crates/prometeu-analysis/src/text_index.rs`
* Estrutura:
* `line_starts: Vec<u32>` (byte offsets)
* `line_utf16_lens: Vec<u32>` (opcional cache)
2. API mínima:
* `TextIndex::new(text: &str) -> Self`
* `fn byte_to_lsp(&self, byte: u32) -> (u32 /*line*/, u32 /*utf16_col*/)`
* `fn lsp_to_byte(&self, line: u32, utf16_col: u32) -> u32`
3. Algoritmo
* `line_starts` calculado por varredura de `\n`.
* Para conversão de col:
* pegar o slice da linha (`&text[line_start..line_end]`)
* iterar `char_indices()`, acumulando:
* `byte_pos` e `utf16_count += ch.len_utf16()`
* parar quando:
* `byte_pos >= target_byte` (byte_to_lsp)
* `utf16_count >= target_utf16` (lsp_to_byte)
4. Testes fortes com Unicode
* Casos: `"aé🙂b"` (emoji e acento).
  * Validar round-trip, para bytes em fronteira de char:
    * se `(line, col) = byte_to_lsp(byte)`, então `lsp_to_byte(line, col) == byte`.
5. Integração
* Por enquanto, **não** mexer no LSP server.
* Apenas oferecer API em `analysis` para o LSP consumir na PR-08.
### Testes de aceite
* `cargo test -q`.
* Testes novos em `prometeu-analysis`:
* `text_index_ascii_roundtrip()`
* `text_index_unicode_roundtrip_utf16()`
---
## PR-R4 — ProjectId padronizado + modelagem de Project/Module estável
**Branch:** `pr-r4-project-id`
### Briefing
Hoje o resolver trabalha com `Project { name, version }` e o `symbols.json` contém projects e símbolos agrupados por projeto. Para LSP e para incremental analysis, queremos IDs estáveis e leves para:
* mapear `uri -> FileId -> (ProjectId, ModuleId)`
* armazenar caches por projeto
* suportar workspace com múltiplos projetos no futuro
Você pediu explicitamente incluir `ProjectId(u32)` nesta série.
### Alvo
Introduzir `ProjectId(u32)` e plugar no modelo de resolução/linking:
* Cada projeto carregado/descoberto no workspace recebe `ProjectId`.
* Mapas centrais usam `ProjectId` como chave em vez de string.
### Escopo / Mudanças
1. Definir `ProjectId(u32)` (já definido na PR-R1) e agora **adotar**.
2. Criar um registry estável (no analysis/resolver layer):
* `ProjectRegistry`:
* `by_name: HashMap<ProjectKey, ProjectId>`
* `projects: Vec<ProjectMeta>`
* `ProjectKey` pode ser:
* `{ name: SmolStr, version: Option<SmolStr> }` ou `{ name, version }`
3. Ajustar estruturas existentes para carregar `ProjectId`
* `ModuleRef` / `ModulePath` / `ResolvedModule` devem apontar para `ProjectId`.
* `symbols.json` writer/reader:
* Manter `project: "sdk"` no JSON (formato externo), mas internamente mapear para `ProjectId`.
4. Integração mínima (sem LSP ainda)
  * `AnalysisDb` (ou equivalente) deve conseguir responder:
    * `fn project_for_file(file: FileId) -> Option<ProjectId>` (`None` se o arquivo não pertence a nenhum projeto conhecido)
### Estratégia para não explodir o diff
* Não reescrever o mundo:
* manter `ProjectMeta { id: ProjectId, name, version }`
* adicionar `id` aos lugares críticos (resolver, module index, symbols export)
### Testes de aceite
* `cargo test -q`.
* Teste novo:
* `project_registry_stable_ids_for_same_key()`
* `symbols_json_roundtrip_preserves_project_grouping()` (se houver infra de roundtrip)
### Critérios de “done”
* Nenhum mapa central chaveado por `String` para identificar projeto no core; usar `ProjectId`.
* Persistência (symbols.json) continua legível e compatível.
---
# Ordem recomendada de merge (para minimizar conflitos)
1. PR-R1 (IDs)
2. PR-R2 (Span)
3. PR-R3 (TextIndex)
4. PR-R4 (ProjectId)
> Depois disso, a PR-08 (LSP MVP) fica bem menor: o LSP só consome `Span` + `TextIndex` + IDs.
---
# Checklist global (pré-PR-08)
* [ ] IDs unificados e usados em todos os crates
* [ ] Span único, sempre com `FileId`, e offsets em bytes
* [ ] TextIndex com conversão UTF-16 confiável (testado)
* [ ] ProjectId adotado no resolver/modelo de projeto
* [ ] Workspace compila e `cargo test` passa