added first deps implementation

This commit is contained in:
bQUARKz 2026-02-17 10:13:26 +00:00
parent ce951d2e10
commit 5ee28101a3
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
129 changed files with 1691 additions and 2176 deletions

2
.gitignore vendored
View File

@ -8,3 +8,5 @@ build
# Ignore Kotlin plugin data # Ignore Kotlin plugin data
.kotlin .kotlin
.DS_Store

View File

@ -17,6 +17,9 @@ dependencies {
testImplementation("org.junit.jupiter:junit-jupiter:5.12.1") testImplementation("org.junit.jupiter:junit-jupiter:5.12.1")
testRuntimeOnly("org.junit.platform:junit-platform-launcher") testRuntimeOnly("org.junit.platform:junit-platform-launcher")
implementation("org.slf4j:slf4j-api:2.0.7")
implementation("org.slf4j:slf4j-simple:2.0.7")
} }
java { java {

View File

@ -1,11 +1,16 @@
[versions] [versions]
javafx = "23.0.2" javafx = "23.0.2"
richtextfx = "0.11.2" richtextfx = "0.11.2"
jackson = "2.18.2"
[libraries] [libraries]
javafx-controls = { group = "org.openjfx", name = "javafx-controls", version.ref = "javafx" } javafx-controls = { group = "org.openjfx", name = "javafx-controls", version.ref = "javafx" }
javafx-fxml = { group = "org.openjfx", name = "javafx-fxml", version.ref = "javafx" } javafx-fxml = { group = "org.openjfx", name = "javafx-fxml", version.ref = "javafx" }
richtextfx = { group = "org.fxmisc.richtext", name = "richtextfx", version.ref = "richtextfx" } richtextfx = { group = "org.fxmisc.richtext", name = "richtextfx", version.ref = "richtextfx" }
jackson-databind = { group = "com.fasterxml.jackson.core", name = "jackson-databind", version.ref = "jackson" }
apache-commons-lang3 = { group = "org.apache.commons", name = "commons-lang3", version = "3.13.0" }
apache-commons-io = { group = "commons-io", name = "commons-io", version = "2.13.0" }
apache-commons-collections = { group = "org.apache.commons", name = "commons-collections4", version = "4.4" }
[plugins] [plugins]
javafx = { id = "org.openjfx.javafxplugin", version = "0.1.0" } javafx = { id = "org.openjfx.javafxplugin", version = "0.1.0" }

View File

@ -1,9 +0,0 @@
[package]
name = "prometeu-language-pbs"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
prometeu-language-api = { path = "../../prometeu-language-api" }

View File

@ -1,16 +0,0 @@
use std::sync::OnceLock;
use prometeu_language_api::{LanguageSpec, SourcePolicy};
/// Lazily-initialized language specification for the PBS language.
pub static LANGUAGE_SPEC: OnceLock<LanguageSpec> = OnceLock::new();

/// Returns the PBS `LanguageSpec`, initializing `LANGUAGE_SPEC` on first use.
///
/// NOTE(review): as a private, never-called function, this initializer was
/// dead code, so `LANGUAGE_SPEC.get()` always observed `None` downstream
/// (the registry crate unwraps that `get()`). Making it `pub` lets the crate
/// root re-export it so consumers can obtain an initialized spec.
pub fn registry() -> &'static LanguageSpec {
    LANGUAGE_SPEC.get_or_init(|| LanguageSpec {
        id: "pbs",
        source_policy: SourcePolicy {
            extensions: vec!["pbs"],
            case_sensitive: true,
        },
    })
}

View File

@ -1,3 +0,0 @@
mod language_spec;
pub use language_spec::LANGUAGE_SPEC;

View File

@ -1,11 +0,0 @@
[package]
name = "prometeu-languages-registry"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
prometeu-language-api = { path = "../../prometeu-language-api" }
prometeu-language-pbs = { path = "../prometeu-language-pbs" }

View File

@ -1,20 +0,0 @@
use prometeu_language_api::LanguageSpec;
use std::collections::HashMap;
use std::sync::OnceLock;
use prometeu_language_pbs::LANGUAGE_SPEC as PBS_LANGUAGE_SPEC;
static REGISTRY: OnceLock<HashMap<&'static str, LanguageSpec>> = OnceLock::new();

/// Builds (once) and returns the global language registry, keyed by language id.
fn registry() -> &'static HashMap<&'static str, LanguageSpec> {
    REGISTRY.get_or_init(|| {
        // The pbs lookup belongs inside the init closure: the original code
        // evaluated `PBS_LANGUAGE_SPEC.get().unwrap()` on EVERY call (even
        // after the registry was built) and panicked with no context.
        // NOTE(review): `LANGUAGE_SPEC` is a `OnceLock` owned by the pbs
        // crate and nothing visible here initializes it — the wiring must
        // guarantee initialization before the first registry query.
        let pbs = PBS_LANGUAGE_SPEC
            .get()
            .expect("PBS LANGUAGE_SPEC must be initialized before the registry is first queried");
        HashMap::from([(pbs.id, pbs.clone())])
    })
}

/// Looks up a `LanguageSpec` by its id (e.g. "pbs").
pub fn get_language_spec(id: &str) -> Option<&LanguageSpec> {
    registry().get(id)
}

View File

@ -1,3 +0,0 @@
mod language_spec_registry;
pub use language_spec_registry::get_language_spec;

View File

@ -1,24 +0,0 @@
[package]
name = "prometeu-build-pipeline"
version = "0.1.0"
edition = "2021"
license.workspace = true
repository.workspace = true
[[bin]]
name = "prometeu-build-pipeline"
path = "src/main.rs"
[package.metadata.dist]
dist = true
include = ["../../VERSION.txt"]
[dependencies]
prometeu-deps = { path = "../prometeu-deps" }
prometeu-core = { path = "../prometeu-core" }
prometeu-languages-registry = { path = "../languages/prometeu-languages-registry" }
clap = { version = "4.5.54", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
anyhow = "1.0.100"
camino = "1.2.2"

View File

@ -1,158 +0,0 @@
use crate::pipeline::run_phases;
use crate::{BuildMode, PipelineConfig, PipelineInput, PipelineOutput};
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use prometeu_deps::{load_sources, resolve_workspace, DepsConfig};
use std::path::{Path, PathBuf};
use camino::Utf8Path;
use crate::emit_artifacts::{emit_artifacts, EmitOptions};
/// Command line interface for the Prometeu Compiler.
// NOTE: clap turns `///` doc comments on fields into --help text, so review
// notes here use `//` comments to avoid changing the CLI's output.
#[derive(Parser)]
#[command(name = "prometeu")]
#[command(version, about = "PROMETEU toolchain entrypoint", long_about = None)]
pub struct Cli {
    #[command(subcommand)]
    pub command: Commands,
}

/// Available subcommands for the compiler.
#[derive(Subcommand)]
pub enum Commands {
    /// Builds a Prometeu project by compiling source code into an artifact (pbc/program image).
    Build {
        /// Path to the project root directory.
        project_dir: PathBuf,
        /// Path to save the compiled artifact.
        /// If omitted, deps/pipeline decide a default under target/ or dist/.
        #[arg(short, long)]
        out: Option<PathBuf>,
        /// Whether to generate a .json symbols file for source mapping.
        #[arg(long, default_value_t = true)]
        emit_symbols: bool,
        /// Whether to generate a .disasm file for debugging.
        #[arg(long, default_value_t = true)]
        emit_disasm: bool,
        /// Whether to explain the dependency resolution process.
        #[arg(long)]
        explain_deps: bool,
        /// Build mode (debug/release).
        // NOTE(review): "test" is also accepted by parse_mode but not
        // advertised in the help text above — confirm whether that is intended.
        #[arg(long, default_value = "debug")]
        mode: String,
    },
    /// Verifies if a Prometeu project is valid without emitting code.
    Verify {
        // Path to the project root directory (no help text in the original).
        project_dir: PathBuf,
        /// Whether to explain the dependency resolution process.
        #[arg(long)]
        explain_deps: bool,
    },
}
/// CLI entrypoint: parses arguments and dispatches to the selected subcommand.
///
/// All pipeline diagnostics (warnings included) are printed to stderr before
/// the error check; the function bails (non-zero exit) if any diagnostic has
/// error severity.
pub fn run() -> Result<()> {
    let cli = Cli::parse();
    match cli.command {
        Commands::Build {
            project_dir,
            out,
            emit_disasm,
            emit_symbols,
            explain_deps,
            mode,
        } => {
            let build_mode = parse_mode(&mode)?;
            // Frontends are disabled here on purpose ("Hard Reset" phase);
            // the pipeline runs boot/load/lowering/emit only.
            let cfg = PipelineConfig {
                mode: build_mode,
                enable_cache: true,
                enable_frontends: false,
            };
            let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
                .context("pipeline: failed to execute pipeline")?;
            for diagnostics in &pipeline_output.diagnostics {
                eprintln!("{:?}", diagnostics);
            }
            let emit_opts = EmitOptions {
                out,
                emit_symbols,
                emit_disasm,
            };
            // Artifacts are emitted even when errors exist; the failure check
            // deliberately happens afterwards.
            emit_artifacts(&emit_opts, &pipeline_output)
                .context("emit: failed to write artifacts")?;
            if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
                anyhow::bail!("build failed due to errors");
            }
        }
        Commands::Verify {
            project_dir,
            explain_deps,
        } => {
            // Verify runs the same pipeline in Test mode and emits nothing.
            let cfg = PipelineConfig {
                mode: BuildMode::Test,
                enable_cache: true,
                enable_frontends: false,
            };
            let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
                .context("pipeline: failed to execute pipeline")?;
            for diagnostic in &pipeline_output.diagnostics {
                eprintln!("{:?}", diagnostic);
            }
            if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
                anyhow::bail!("verify failed due to errors");
            }
        }
    }
    Ok(())
}
/// Resolves the workspace through deps, loads every project's sources, and
/// executes the pipeline phases, returning their combined output.
fn run_pipeline(cfg: PipelineConfig, project_dir: &Path, explain_deps: bool) -> Result<PipelineOutput> {
    let deps_cfg = DepsConfig {
        explain: explain_deps,
        cache_dir: Default::default(),
        registry_dirs: vec![],
    };
    // deps works exclusively with UTF-8 paths (camino).
    let utf8_dir = Utf8Path::from_path(project_dir)
        .with_context(|| format!("deps: failed to convert project_dir to Utf8Path: {:?}", project_dir))?;
    let resolved = resolve_workspace(&deps_cfg, utf8_dir)
        .with_context(|| format!("deps: failed to resolve project at {:?}", project_dir))?;
    let sources = load_sources(&deps_cfg, &resolved)
        .context("deps: failed to load sources")?;
    let input = PipelineInput {
        graph: resolved.graph,
        stack: resolved.stack,
        sources,
    };
    Ok(run_phases(cfg, input))
}
/// Parse `--mode` from CLI.
///
/// Matching is ASCII case-insensitive; anything other than debug, release or
/// test is rejected with an error naming the accepted values.
fn parse_mode(s: &str) -> Result<BuildMode> {
    if s.eq_ignore_ascii_case("debug") {
        Ok(BuildMode::Debug)
    } else if s.eq_ignore_ascii_case("release") {
        Ok(BuildMode::Release)
    } else if s.eq_ignore_ascii_case("test") {
        Ok(BuildMode::Test)
    } else {
        // The original printed the lowercased form of the input; keep that.
        anyhow::bail!(
            "invalid --mode '{}': expected debug|release|test",
            s.to_ascii_lowercase()
        )
    }
}

View File

@ -1,23 +0,0 @@
/// Build mode selected via the CLI's `--mode` flag.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BuildMode {
    Debug,
    Release,
    Test,
}

/// Pipeline-wide configuration flags, fixed for the duration of one run.
#[derive(Debug, Clone)]
pub struct PipelineConfig {
    /// Build mode (debug / release / test).
    pub mode: BuildMode,
    /// Whether cached results may be reused.
    pub enable_cache: bool,
    /// Whether the frontend phase runs at all (see phases::language).
    pub enable_frontends: bool,
}

impl Default for PipelineConfig {
    fn default() -> Self {
        Self {
            mode: BuildMode::Debug,
            enable_cache: true,
            enable_frontends: false, // Hard Reset default: pipeline runs with no FE.
        }
    }
}

View File

@ -1,71 +0,0 @@
use std::any::Any;
use prometeu_core::{Diagnostic, FileDB, FileId, NameInterner, ProjectId};
use prometeu_deps::BuildStack;
/// Per-project arena slot created from the BuildStack order.
/// The pipeline owns this vector and indexes it by stack position.
#[derive(Debug)]
pub struct ProjectCtx {
    pub project_id: ProjectId,
    /// FileIds inserted into `source_db` for this project.
    pub files: Vec<FileId>,
    /// Frontend output (TypedHIRBundle or similar) - intentionally opaque.
    pub frontend_out: Option<Box<dyn Any>>,
    /// Backend output (ProgramImage / BytecodeModule / Artifact).
    /// Keep as opaque until you finalize your bytecode/image crate.
    pub backend_out: Option<Box<dyn Any>>,
}

impl ProjectCtx {
    /// Creates an empty slot for `project_id`; files and outputs are filled
    /// in by later phases.
    pub fn new(project_id: ProjectId) -> Self {
        Self {
            project_id,
            files: Vec::new(),
            frontend_out: None,
            backend_out: None,
        }
    }
}

/// Mutable state threaded through every pipeline phase: source storage,
/// interned names, collected diagnostics and the per-project slots.
#[derive(Debug)]
pub struct PipelineCtx {
    pub source_db: FileDB,
    pub interner: NameInterner,
    pub diagnostics: Vec<Diagnostic>,
    pub projects: Vec<ProjectCtx>,
}
impl PipelineCtx {
    /// Creates an empty context with fresh arenas.
    pub fn new() -> Self {
        Self {
            source_db: FileDB::new(),
            interner: NameInterner::new(),
            diagnostics: Vec::new(),
            projects: Vec::new(),
        }
    }

    /// Records a diagnostic emitted by any phase.
    pub fn push_diagnostic(&mut self, d: Diagnostic) {
        self.diagnostics.push(d);
    }

    /// Initialize per-project contexts from the BuildStack order.
    pub fn init_projects_from_stack(&mut self, stack: &BuildStack) {
        self.projects.clear();
        // ProjectId is Copy (see define_id!'s derives), so the previous
        // per-element `.clone()` was redundant; `extend` also reserves
        // capacity from the iterator's size hint.
        self.projects
            .extend(stack.projects.iter().map(|&id| ProjectCtx::new(id)));
    }

    /// Mutable access to the slot at `index_in_stack`. Panics if out of range.
    pub fn project_ctx_mut(&mut self, index_in_stack: usize) -> &mut ProjectCtx {
        &mut self.projects[index_in_stack]
    }

    /// Shared access to the slot at `index_in_stack`. Panics if out of range.
    pub fn project_ctx(&self, index_in_stack: usize) -> &ProjectCtx {
        &self.projects[index_in_stack]
    }
}

/// `Default` mirrors `new()` (clippy::new_without_default).
impl Default for PipelineCtx {
    fn default() -> Self {
        Self::new()
    }
}

View File

@ -1,17 +0,0 @@
use std::path::PathBuf;
use crate::PipelineOutput;
/// Options controlling which artifacts `emit_artifacts` writes and where.
pub struct EmitOptions {
    /// Explicit output path; `None` lets the pipeline choose a default.
    pub(crate) out: Option<PathBuf>,
    /// Whether to write a .json symbols file.
    pub(crate) emit_symbols: bool,
    /// Whether to write a .disasm file.
    pub(crate) emit_disasm: bool,
}

/// Writes build artifacts to disk.
///
/// Currently a stub that succeeds without any IO — both parameters are
/// intentionally unused until the artifact formats are finalized.
pub fn emit_artifacts(_opts: &EmitOptions, _outp: &PipelineOutput) -> anyhow::Result<()> {
    // Later:
    // - decide output dir (opts.out or default)
    // - write .pbc / program image
    // - write symbols.json (if exists)
    // - write disasm (if exists)
    Ok(())
}

View File

@ -1,12 +0,0 @@
pub mod cli;
pub mod config;
pub mod ctx;
pub mod pipeline;
pub mod phases;
mod emit_artifacts;
pub use config::*;
pub use ctx::*;
pub use pipeline::*;
pub use cli::run;

View File

@ -1,7 +0,0 @@
use anyhow::Result;
/// Main entry point for the Prometeu Compiler binary.
/// It delegates execution to the library's `run` function.
fn main() -> Result<()> {
    // All CLI parsing, diagnostics printing and error context live in the
    // library crate; the binary only forwards the anyhow::Result.
    prometeu_build_pipeline::run()
}

View File

@ -1,12 +0,0 @@
use crate::{
config::PipelineConfig,
ctx::PipelineCtx,
pipeline::{PipelineInput},
};
/// Boot phase: allocates one `ProjectCtx` slot per project, in build-stack
/// order. Performs no filesystem access and makes no FE/BE assumptions.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    ctx.init_projects_from_stack(&input.stack);
}

View File

@ -1,7 +0,0 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::{Artifacts, PipelineInput}};
/// Emit phase (Hard Reset stub): produces an empty artifact set.
/// Later this will write build outputs, routing IO through deps if strict
/// IO centralization is desired.
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) -> Artifacts {
    Artifacts::default()
}

View File

@ -1,11 +0,0 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Frontend phase (Hard Reset stub): honors `enable_frontends` but no
/// frontend is wired yet.
pub fn run(cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {
    if cfg.enable_frontends {
        // Later: iterate projects in stack order and call the FE plugin(s).
    }
}

View File

@ -1,117 +0,0 @@
use prometeu_core::{Diagnostic, Severity, Span};
use prometeu_deps::LoadedSources;
use crate::{
config::PipelineConfig,
ctx::PipelineCtx,
pipeline::PipelineInput,
};
/// Load-source phase: copies `LoadedSources` into the pipeline's `FileDB`
/// and warns about projects that ended up with no source files.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    load_sources(&input.sources, ctx);
    for i in 0..ctx.projects.len() {
        if ctx.projects[i].files.is_empty() {
            let project_id = &input.stack.projects[i];
            // Fall back to the raw id when the graph does not know this
            // project, instead of panicking on `unwrap()` — a stack/graph
            // mismatch already produces its own diagnostics in load_sources.
            let project_name = input
                .graph
                .project(project_id)
                .map(|p| p.name.clone())
                .unwrap_or_else(|| format!("{:?}", project_id));
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Warning,
                code: "PIPELINE_NO_SOURCES".into(),
                message: format!(
                    "Project '{}' has no source files loaded.",
                    project_name
                ),
                span: Span::none(),
                related: vec![],
            });
        }
    }
}
/// Copies `LoadedSources` into the context's `FileDB`, project by project.
///
/// Any misalignment between the sources and the BuildStack is reported as
/// error diagnostics instead of panicking, so that as many problems as
/// possible surface in a single run.
fn load_sources(sources: &LoadedSources, ctx: &mut PipelineCtx) {
    let stack_len = ctx.projects.len();
    let src_len = sources.per_project.len();
    // 1) Emit a diagnostic if the sizes don't match.
    if src_len != stack_len {
        ctx.push_diagnostic(Diagnostic {
            severity: Severity::Error,
            code: "PIPELINE_SOURCES_STACK_LEN_MISMATCH".into(),
            message: format!(
                "LoadedSources.per_project len ({}) does not match BuildStack len ({}).",
                src_len, stack_len
            ),
            span: Span::none(),
            related: vec![],
        });
    }
    // 2) Process the overlapping prefix (don't panic, just keep running with diagnostics).
    let n = stack_len.min(src_len);
    for i in 0..n {
        let expected = ctx.projects[i].project_id;
        let got = sources.per_project[i].project_id;
        if got != expected {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_STACK_ORDER_MISMATCH".into(),
                message: format!(
                    "LoadedSources is not aligned with BuildStack at index {}: expected project_id {:?}, got {:?}.",
                    i, expected, got
                ),
                span: Span::none(),
                related: vec![],
            });
            // There is no fix tolerance here: if the order is wrong it is
            // wrong. Keep going to collect as many diagnostics as possible.
            continue;
        }
        // Copy each file into the FileDB and record its id on the project slot.
        for f in &sources.per_project[i].files {
            let file_id = ctx.source_db.upsert(&f.uri, &f.text);
            ctx.projects[i].files.push(file_id);
        }
    }
    // 3) Any LoadedSources entries beyond the stack length are a deps bug.
    if src_len > stack_len {
        for extra in &sources.per_project[stack_len..] {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_EXTRA_PROJECT".into(),
                message: format!(
                    "LoadedSources contains extra project_id {:?} not present in BuildStack.",
                    extra.project_id
                ),
                span: Span::none(),
                related: vec![],
            });
        }
    }
    // 4) Missing inputs are another deps bug.
    if stack_len > src_len {
        // Buffer first: the loop borrows ctx.projects, so pushing straight
        // into ctx.diagnostics would be a second (mutable) borrow of ctx.
        let mut diagnostics: Vec<Diagnostic> = Vec::new();
        for missing in &ctx.projects[src_len..] {
            diagnostics.push(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_MISSING_PROJECT".into(),
                message: format!(
                    "LoadedSources missing sources for project_id {:?} present in BuildStack.",
                    missing.project_id
                ),
                span: Span::none(),
                related: vec![],
            });
        }
        for diagnostic in diagnostics {
            ctx.push_diagnostic(diagnostic);
        }
    }
}

View File

@ -1,6 +0,0 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Backend lowering phase (Hard Reset stub).
/// Later: consume TypedHIRBundle(s) and lower into ProgramImage/BytecodeModule.
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {}

View File

@ -1,5 +0,0 @@
pub mod boot;
pub mod load_source;
pub mod language;
pub mod lowering;
pub mod emit;

View File

@ -1,59 +0,0 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, phases};
use prometeu_core::Diagnostic;
use prometeu_deps::{BuildStack, LoadedSources, ResolvedGraph};
/// Immutable input to a pipeline run: the resolved project graph, the build
/// order, and all pre-loaded source files (deps did the IO already).
#[derive(Debug, Clone)]
pub struct PipelineInput {
    pub graph: ResolvedGraph,
    pub stack: BuildStack,
    pub sources: LoadedSources
}

/// Basic run statistics for reporting.
#[derive(Debug, Default, Clone)]
pub struct PipelineStats {
    pub projects_count: usize,
    pub files_count: usize,
}

#[derive(Debug, Default, Clone)]
pub struct Artifacts {
    // placeholder: later include produced ProgramImage(s), debug bundles, logs, etc.
}

/// Everything one pipeline run produces: diagnostics, artifacts and stats.
#[derive(Debug, Default)]
pub struct PipelineOutput {
    pub diagnostics: Vec<Diagnostic>,
    pub artifacts: Artifacts,
    pub stats: PipelineStats,
}
pub(crate) fn run_phases(cfg: PipelineConfig, input: PipelineInput) -> PipelineOutput {
let mut ctx = PipelineCtx::new();
// Boot: create project slots in arena order.
phases::boot::run(&cfg, &input, &mut ctx);
// Load source: populate FileDB from LoadedSources.
phases::load_source::run(&cfg, &input, &mut ctx);
// Frontend phase (stub / optional).
phases::language::run(&cfg, &input, &mut ctx);
// Backend phase (stub).
phases::lowering::run(&cfg, &input, &mut ctx);
// Emit phase (stub).
let artifacts = phases::emit::run(&cfg, &input, &mut ctx);
// Stats (basic).
let mut stats = PipelineStats::default();
stats.projects_count = ctx.projects.len();
stats.files_count = ctx.projects.iter().map(|p| p.files.len()).sum();
PipelineOutput {
diagnostics: ctx.diagnostics,
artifacts,
stats,
}
}

View File

@ -1,10 +0,0 @@
[package]
name = "prometeu-core"
version = "0.1.0"
edition = "2024"
license.workspace = true
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
prometeu-bytecode = { path = "../prometeu-bytecode" }

View File

@ -1,3 +0,0 @@
mod source;
pub use source::*;

View File

@ -1,81 +0,0 @@
use serde::{Serialize, Serializer};
use crate::Span;
/// Severity level of a diagnostic.
///
/// Copy and Eq are derived in addition to the original Clone/PartialEq:
/// the enum is fieldless, and callers compare/copy it freely.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Severity {
    Error,
    Warning,
}

impl Severity {
    /// Returns `true` only for `Severity::Error`.
    pub fn is_error(&self) -> bool {
        matches!(self, Severity::Error)
    }
}
impl Serialize for Severity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Severity::Error => serializer.serialize_str("error"),
Severity::Warning => serializer.serialize_str("warning"),
}
}
}
/// A single compiler diagnostic: severity, a stable machine-readable code
/// (e.g. "PIPELINE_NO_SOURCES"), a human-readable message, the primary span,
/// and any related locations.
#[derive(Debug, Clone, Serialize)]
pub struct Diagnostic {
    pub severity: Severity,
    pub code: String,
    pub message: String,
    /// Primary location; `Span::none()` for file-less diagnostics.
    pub span: Span,
    /// Extra (label, span) pairs pointing at related code.
    pub related: Vec<(String, Span)>,
}

/// An ordered collection of diagnostics.
#[derive(Debug, Clone, Serialize)]
pub struct DiagnosticBundle {
    pub diagnostics: Vec<Diagnostic>,
}
impl DiagnosticBundle {
    /// Creates an empty bundle.
    pub fn new() -> Self {
        Self {
            diagnostics: Vec::new(),
        }
    }

    /// Appends a diagnostic to the bundle.
    pub fn push(&mut self, diagnostic: Diagnostic) {
        self.diagnostics.push(diagnostic);
    }

    /// Convenience constructor: a bundle holding a single error diagnostic.
    pub fn error(code: &str, message: String, span: Span) -> Self {
        let mut bundle = Self::new();
        bundle.push(Diagnostic {
            severity: Severity::Error,
            code: code.to_string(),
            message,
            span,
            related: Vec::new(),
        });
        bundle
    }

    /// Returns `true` if any contained diagnostic is an error.
    pub fn has_errors(&self) -> bool {
        // Delegate to Severity::is_error instead of a parallel `matches!`,
        // so the two definitions cannot drift apart.
        self.diagnostics.iter().any(|d| d.severity.is_error())
    }
}
impl From<Diagnostic> for DiagnosticBundle {
    /// Wraps a single diagnostic in a one-element bundle.
    fn from(diagnostic: Diagnostic) -> Self {
        Self {
            diagnostics: vec![diagnostic],
        }
    }
}

View File

@ -1,69 +0,0 @@
use std::collections::HashMap;
use crate::FileId;
use crate::LineIndex;
/// In-memory store of source files, addressed by dense `FileId`s (assigned
/// in insertion order) and by uri.
#[derive(Default, Debug)]
pub struct FileDB {
    files: Vec<FileData>,
    uri_to_id: HashMap<String, FileId>,
}

/// One stored file: its uri, full text, and the derived line index.
#[derive(Debug)]
struct FileData {
    uri: String,
    text: String,
    line_index: LineIndex,
}
impl FileDB {
    /// Creates an empty database.
    pub fn new() -> Self {
        Self {
            files: Vec::new(),
            uri_to_id: HashMap::new(),
        }
    }

    /// Inserts `uri` with `text`, or replaces the content of an existing
    /// entry, returning the file's stable id. Re-upserting the same uri
    /// keeps its id and rebuilds the line index.
    pub fn upsert(&mut self, uri: &str, text: &str) -> FileId {
        // Build the record once instead of duplicating the construction in
        // both branches; `text` is already &str, so the original's `&text`
        // double-reference for LineIndex::new was unnecessary.
        let data = FileData {
            uri: uri.to_owned(),
            text: text.to_owned(),
            line_index: LineIndex::new(text),
        };
        if let Some(&id) = self.uri_to_id.get(uri) {
            self.files[id.0 as usize] = data;
            id
        } else {
            let id = FileId(self.files.len() as u32);
            self.files.push(data);
            self.uri_to_id.insert(uri.to_owned(), id);
            id
        }
    }

    /// Looks up the id for a uri, if known.
    pub fn file_id(&self, uri: &str) -> Option<FileId> {
        self.uri_to_id.get(uri).copied()
    }

    /// Returns the uri of a stored file. Panics on an unknown id.
    pub fn uri(&self, id: FileId) -> &str {
        &self.files[id.0 as usize].uri
    }

    /// Returns the full text of a stored file. Panics on an unknown id.
    pub fn text(&self, id: FileId) -> &str {
        &self.files[id.0 as usize].text
    }

    /// Returns the line index of a stored file. Panics on an unknown id.
    pub fn line_index(&self, id: FileId) -> &LineIndex {
        &self.files[id.0 as usize].line_index
    }

    /// Returns a list of all known file IDs in insertion order.
    pub fn all_files(&self) -> Vec<FileId> {
        (0..self.files.len()).map(|i| FileId(i as u32)).collect()
    }
}

View File

@ -1,60 +0,0 @@
/// Declares a `u32` newtype id: `repr(transparent)`, the usual derives
/// (Copy/Eq/Ord/Hash + serde), a `NONE` sentinel at `u32::MAX`, and cheap
/// conversions to/from `u32`.
macro_rules! define_id {
    ($name:ident) => {
        #[repr(transparent)]
        #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize, serde::Deserialize)]
        pub struct $name(pub u32);

        impl $name {
            /// Sentinel meaning "no id".
            pub const NONE: $name = $name(u32::MAX);

            #[inline]
            pub const fn as_u32(self) -> u32 { self.0 }

            /// True when this id is the `NONE` sentinel.
            #[inline]
            pub fn is_none(self) -> bool {
                self == $name::NONE
            }
        }

        impl From<u32> for $name {
            #[inline]
            fn from(value: u32) -> Self { Self(value) }
        }

        impl From<$name> for u32 {
            #[inline]
            fn from(value: $name) -> Self { value.0 }
        }
    };
}

define_id!(FileId);
define_id!(NodeId);
define_id!(NameId);
define_id!(SymbolId);
define_id!(TypeId);
define_id!(ModuleId);
define_id!(ProjectId);

#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;
    use std::mem::size_of;

    #[test]
    fn ids_are_repr_transparent_and_hashable() {
        // Every id must stay exactly 4 bytes (repr(transparent) over u32).
        assert_eq!(size_of::<FileId>(), 4);
        assert_eq!(size_of::<NodeId>(), 4);
        assert_eq!(size_of::<NameId>(), 4);
        assert_eq!(size_of::<SymbolId>(), 4);
        assert_eq!(size_of::<TypeId>(), 4);
        assert_eq!(size_of::<ModuleId>(), 4);
        assert_eq!(size_of::<ProjectId>(), 4);
        // Hash/Eq usage
        let mut m: HashMap<SymbolId, &str> = HashMap::new();
        m.insert(SymbolId(1), "one");
        assert_eq!(m.get(&SymbolId(1)).copied(), Some("one"));
    }
}

View File

@ -1,41 +0,0 @@
/// Maps byte offsets to zero-based (line, column) pairs and back for one text.
///
/// `line_starts[i]` is the byte offset where line `i` begins; line 0 always
/// starts at offset 0. Columns are byte offsets within the line.
#[derive(Debug)]
pub struct LineIndex {
    line_starts: Vec<u32>,
    total_len: u32,
}

impl LineIndex {
    /// Builds the index by recording the offset just past every '\n'.
    pub fn new(text: &str) -> Self {
        let mut line_starts = vec![0u32];
        line_starts.extend(
            text.char_indices()
                .filter(|&(_, c)| c == '\n')
                .map(|(offset, _)| (offset + 1) as u32),
        );
        Self {
            line_starts,
            total_len: text.len() as u32,
        }
    }

    /// Converts a byte offset into a (line, column) pair.
    pub fn offset_to_line_col(&self, offset: u32) -> (u32, u32) {
        // Index of the last line start that is <= offset. `line_starts`
        // always contains 0, so partition_point returns at least 1.
        let line = self.line_starts.partition_point(|&start| start <= offset) - 1;
        let col = offset - self.line_starts[line];
        (line as u32, col)
    }

    /// Converts a (line, column) pair back into a byte offset, or `None`
    /// when the position lies outside the text.
    pub fn line_col_to_offset(&self, line: u32, col: u32) -> Option<u32> {
        let start = *self.line_starts.get(line as usize)?;
        let offset = start + col;
        let next_start = self
            .line_starts
            .get(line as usize + 1)
            .copied()
            .unwrap_or(self.total_len);
        // Valid if before the next line start, or exactly the end-of-text
        // position on the final line.
        if offset < next_start || (offset == next_start && offset == self.total_len) {
            Some(offset)
        } else {
            None
        }
    }
}

View File

@ -1,13 +0,0 @@
mod ids;
mod span;
mod file_db;
mod name_interner;
mod diagnostics;
mod line_index;
pub use ids::*;
pub use span::Span;
pub use file_db::FileDB;
pub use line_index::LineIndex;
pub use name_interner::NameInterner;
pub use diagnostics::*;

View File

@ -1,56 +0,0 @@
use std::collections::HashMap;
use crate::NameId;
/// Interns strings: each distinct string gets a stable, dense `NameId`.
#[derive(Debug, Default, Clone)]
pub struct NameInterner {
    names: Vec<String>,
    ids: HashMap<String, NameId>,
}

impl NameInterner {
    /// Creates an empty interner.
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns the id for `s`, interning a copy on first sight.
    pub fn intern(&mut self, s: &str) -> NameId {
        if let Some(&existing) = self.ids.get(s) {
            return existing;
        }
        let id = NameId(self.names.len() as u32);
        let owned = s.to_owned();
        self.ids.insert(owned.clone(), id);
        self.names.push(owned);
        id
    }

    /// Looks up an already-interned string without interning it.
    pub fn get(&self, s: &str) -> Option<NameId> {
        self.ids.get(s).copied()
    }

    /// Returns the string for `id`. Panics if `id` did not come from this
    /// interner.
    pub fn resolve(&self, id: NameId) -> &str {
        &self.names[id.0 as usize]
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn interner_intern_resolve_roundtrip() {
let mut interner = NameInterner::new();
let id = interner.intern("foo");
assert_eq!(interner.resolve(id), "foo");
}
#[test]
fn interner_dedups_strings() {
let mut interner = NameInterner::new();
let id1 = interner.intern("bar");
let id2 = interner.intern("bar");
assert_eq!(id1, id2);
}
}

View File

@ -1,39 +0,0 @@
use crate::FileId;
/// A byte range inside a file; `end` is exclusive.
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct Span {
    pub file: FileId,
    pub start: u32, // byte offset
    pub end: u32, // byte offset, exclusive
}

impl Span {
    /// Creates a span covering [start, end) in `file`.
    #[inline]
    pub fn new(file: FileId, start: u32, end: u32) -> Self {
        Self { file, start, end }
    }

    /// Sentinel span pointing at no file, for file-less diagnostics.
    #[inline]
    pub fn none() -> Self {
        Self {
            file: FileId::NONE,
            start: 0,
            end: 0,
        }
    }

    /// True for the sentinel produced by `Span::none()`.
    #[inline]
    pub fn is_none(&self) -> bool {
        self.file.is_none()
    }

    /// Length in bytes; saturates to 0 if `end < start`.
    #[inline]
    pub fn len(&self) -> u32 {
        self.end.saturating_sub(self.start)
    }

    /// True when the span covers zero bytes.
    /// Added alongside `len` (clippy::len_without_is_empty).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// True if `byte` falls inside [start, end).
    #[inline]
    pub fn contains(&self, byte: u32) -> bool {
        self.start <= byte && byte < self.end
    }
}

View File

@ -1,69 +0,0 @@
use prometeu_core::{FileDB, LineIndex};
#[test]
fn test_line_index_roundtrip() {
    let text = "line 1\nline 2\nline 3";
    let index = LineIndex::new(text);
    // Roundtrip for each character: offset -> (line, col) -> offset.
    for (offset, _) in text.char_indices() {
        let (line, col) = index.offset_to_line_col(offset as u32);
        let recovered_offset = index.line_col_to_offset(line, col).expect("Should recover offset");
        assert_eq!(offset as u32, recovered_offset, "Offset mismatch at line {}, col {}", line, col);
    }
}

#[test]
fn test_line_index_boundaries() {
    // Two lines plus a trailing newline: line starts are at offsets 0, 2, 5.
    let text = "a\nbc\n";
    let index = LineIndex::new(text);
    // "a" -> (0, 0)
    assert_eq!(index.offset_to_line_col(0), (0, 0));
    assert_eq!(index.line_col_to_offset(0, 0), Some(0));
    // "\n" -> (0, 1)
    assert_eq!(index.offset_to_line_col(1), (0, 1));
    assert_eq!(index.line_col_to_offset(0, 1), Some(1));
    // "b" -> (1, 0)
    assert_eq!(index.offset_to_line_col(2), (1, 0));
    assert_eq!(index.line_col_to_offset(1, 0), Some(2));
    // "c" -> (1, 1)
    assert_eq!(index.offset_to_line_col(3), (1, 1));
    assert_eq!(index.line_col_to_offset(1, 1), Some(3));
    // "\n" (second) -> (1, 2)
    assert_eq!(index.offset_to_line_col(4), (1, 2));
    assert_eq!(index.line_col_to_offset(1, 2), Some(4));
    // EOF (after last \n) -> (2, 0)
    assert_eq!(index.offset_to_line_col(5), (2, 0));
    assert_eq!(index.line_col_to_offset(2, 0), Some(5));
    // Out of bounds
    assert_eq!(index.line_col_to_offset(2, 1), None);
    assert_eq!(index.line_col_to_offset(3, 0), None);
}
#[test]
fn test_file_db_upsert_and_access() {
    let mut db = FileDB::new();
    let uri = "file:///test.txt";
    let text = "hello\nworld".to_string();
    // upsert takes &str: the original passed `text.clone()` (a String),
    // which does not match the parameter type and fails to compile.
    let id = db.upsert(uri, &text);
    assert_eq!(db.file_id(uri), Some(id));
    assert_eq!(db.uri(id), uri);
    assert_eq!(db.text(id), &text);
    let index = db.line_index(id);
    assert_eq!(index.offset_to_line_col(6), (1, 0)); // 'w' is at offset 6
    // Update existing file: same id, new content.
    let new_text = "new content".to_string();
    let same_id = db.upsert(uri, &new_text);
    assert_eq!(id, same_id);
    assert_eq!(db.text(id), &new_text);
}

View File

@ -1,14 +0,0 @@
use prometeu_core::{FileId, Span};
#[test]
fn span_end_is_exclusive() {
    let file = FileId(1);
    let s = Span::new(file, 2, 5);
    // len = end - start
    assert_eq!(s.len(), 3);
    // contains is [start, end): start inclusive, end exclusive.
    assert!(s.contains(2));
    assert!(s.contains(3));
    assert!(s.contains(4));
    assert!(!s.contains(5));
}

View File

@ -1,19 +0,0 @@
[package]
name = "prometeu-deps"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
prometeu-core = { path = "../prometeu-core" }
prometeu-language-api = { path = "../prometeu-language-api" }
prometeu-languages-registry = { path = "../languages/prometeu-languages-registry" }
anyhow = "1.0.101"
camino = "1.2.2"
walkdir = "2.5.0"
serde_json = "1.0.149"
[features]
default = []

View File

@ -1,19 +0,0 @@
mod model;
mod load_sources;
mod workspace;
pub use workspace::resolve_workspace;
pub use load_sources::load_sources;
pub use model::manifest::*;
pub use model::resolved_project::ResolvedWorkspace;
pub use model::deps_config::DepsConfig;
pub use model::project_descriptor::ProjectDescriptor;
pub use model::build_stack::BuildStack;
pub use model::resolved_graph::ResolvedGraph;
pub use model::loaded_sources::LoadedSources;
pub use model::project_sources::ProjectSources;
pub use model::loaded_file::LoadedFile;
pub use model::cache_blobs::CacheBlobs;
pub use model::cache_plan::CachePlan;

View File

@ -1,97 +0,0 @@
use anyhow::{Context, Result};
use camino::Utf8PathBuf;
use walkdir::WalkDir;
use crate::{
DepsConfig,
LoadedFile,
LoadedSources,
ProjectSources,
ResolvedWorkspace,
};
/// Walks every source root of every project in the build stack, reads the
/// matching source files from disk, and returns them aligned with the stack.
///
/// This is where source IO happens — the pipeline itself never touches the
/// filesystem. Output order is deterministic (files sorted by uri).
pub fn load_sources(cfg: &DepsConfig, resolved: &ResolvedWorkspace) -> Result<LoadedSources> {
    let mut per_project = Vec::with_capacity(resolved.stack.projects.len());
    for project_id in &resolved.stack.projects {
        let project = resolved
            .graph
            .project(project_id)
            .with_context(|| format!("deps: unknown project_id {:?} in build stack", project_id))?;
        if cfg.explain {
            eprintln!(
                "[deps] load_sources: project {}@{} ({:?})",
                project.name, project.version, project.project_dir
            );
        }
        let mut files: Vec<LoadedFile> = Vec::new();
        for root in &project.source_roots {
            let abs_root = project.project_dir.join(root);
            if cfg.explain {
                eprintln!("[deps] scanning {:?}", abs_root);
            }
            // A missing source root is a hard configuration error.
            if !abs_root.exists() {
                anyhow::bail!(
                    "deps: source root does not exist for project {}@{}: {:?}",
                    project.name,
                    project.version,
                    abs_root
                );
            }
            // Walk recursively. Unreadable entries are silently skipped (e.ok()).
            for entry in WalkDir::new(&abs_root)
                .follow_links(false)
                .into_iter()
                .filter_map(|e| e.ok())
            {
                let ft = entry.file_type();
                if !ft.is_file() {
                    continue;
                }
                let path = entry.path();
                // TODO: prometeu.json needs a field to configure the project's
                // frontend (and therefore which extensions to accept).
                // Filter extensions: start with PBS only.
                if path.extension().and_then(|s| s.to_str()) != Some("pbs") {
                    continue;
                }
                // Convert to Utf8Path (the best effort) and use a stable "uri".
                let path_utf8: Utf8PathBuf = match Utf8PathBuf::from_path_buf(path.to_path_buf()) {
                    Ok(p) => p,
                    Err(_) => {
                        anyhow::bail!("deps: non-utf8 path found while scanning sources: {:?}", path);
                    }
                };
                let text = std::fs::read_to_string(&path_utf8)
                    .with_context(|| format!("deps: failed to read source file {:?}", path_utf8))?;
                // TODO: normalize newlines
                files.push(LoadedFile {
                    uri: path_utf8.to_string(),
                    text,
                });
            }
        }
        // Determinism: sort the file list by uri (important for stable builds).
        files.sort_by(|a, b| a.uri.cmp(&b.uri));
        per_project.push(ProjectSources {
            project_id: project_id.clone(),
            files,
        });
    }
    Ok(LoadedSources { per_project })
}

View File

@ -1,6 +0,0 @@
use prometeu_core::ProjectId;
/// The ordered list of project ids the pipeline builds; every per-project
/// structure (ProjectCtx, LoadedSources) is index-aligned with this order.
#[derive(Debug, Clone)]
pub struct BuildStack {
    pub projects: Vec<ProjectId>,
}

View File

@ -1,7 +0,0 @@
/// Cache blobs computed/validated by deps.
/// The pipeline may decide when to store, but deps executes IO and cache validity.
#[derive(Debug, Clone)]
pub struct CacheBlobs {
// placeholder
pub _unused: (),
}

View File

@ -1,4 +0,0 @@
#[derive(Debug, Clone)]
pub struct CachePlan {
}

View File

@ -1,7 +0,0 @@
use camino::Utf8PathBuf;
/// Configuration for dependency resolution and source loading.
///
/// Debug and Clone are derived for consistency with every other deps model
/// struct (the original derived nothing).
#[derive(Debug, Clone)]
pub struct DepsConfig {
    /// When true, deps prints each resolution/loading step to stderr.
    pub explain: bool,
    /// Directory used for cached blobs.
    pub cache_dir: Utf8PathBuf,
    pub registry_dirs: Vec<Utf8PathBuf>, // or sources ?
}

View File

@ -1,5 +0,0 @@
/// One loaded source file: a stable uri plus its full text.
#[derive(Debug, Clone)]
pub struct LoadedFile {
    pub uri: String,
    pub text: String,
}

View File

@ -1,8 +0,0 @@
use crate::model::project_sources::ProjectSources;
/// Sources already loaded by deps (IO happens in deps, not in pipeline).
#[derive(Debug, Clone)]
pub struct LoadedSources {
    /// For each project in the stack, a list of files (uri + text).
    /// Must be index-aligned with the BuildStack; the pipeline's load_source
    /// phase diagnoses any mismatch.
    pub per_project: Vec<ProjectSources>,
}

View File

@ -1,75 +0,0 @@
use camino::Utf8PathBuf;
use serde::{Deserialize, Serialize};
/// Project manifest — presumably deserialized from prometeu.json (referenced
/// elsewhere in deps); confirm the on-disk filename.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Manifest {
    pub name: String,
    pub version: String,
    /// Source directories relative to the project dir; empty if omitted.
    #[serde(default)]
    pub source_roots: Vec<String>,
    pub language: LanguageDecl,
    #[serde(default)]
    pub deps: Vec<DepDecl>,
}

/// Language selection for a project, by id (e.g. "pbs").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LanguageDecl {
    pub id: String,
}

/// A dependency declaration: a local path or a git repo.
/// Untagged: the variant is inferred from which keys are present.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DepDecl {
    Local {
        path: String,
    },
    Git {
        git: String,
        rev: Option<String>,
    },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PrometeuLock {
pub schema: u32,
#[serde(default)]
pub mappings: Vec<LockMapping>,
}
impl PrometeuLock {
    /// An empty lock: schema 0, no mappings. Used when no lock file exists.
    pub fn blank() -> Self {
        Self {
            schema: 0,
            mappings: Vec::new(),
        }
    }

    /// Finds the local directory that `prometeu.lock` maps the given git
    /// URL + revision to, if such an entry exists.
    pub fn lookup_git_local_dir(&self, url: &str, rev: &str) -> Option<&String> {
        for mapping in &self.mappings {
            if let LockMapping::Git { git, rev: r, local_dir } = mapping {
                if git == url && r == rev {
                    return Some(local_dir);
                }
            }
        }
        None
    }
}
/// One lock entry. Serialized with a `kind` tag ("git" / "registry").
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "lowercase")]
pub enum LockMapping {
    /// Git dep pinned to a revision and materialized at `local_dir`.
    Git {
        git: String,
        rev: String,
        local_dir: String,
    },
    /// Registry dep pinned to a version and materialized at `local_dir`.
    Registry {
        registry: String,
        version: String,
        local_dir: String,
    },
}

View File

@ -1,11 +0,0 @@
pub mod deps_config;
pub mod project_descriptor;
pub mod build_stack;
pub mod resolved_graph;
pub mod loaded_sources;
pub mod project_sources;
pub mod loaded_file;
pub mod cache_blobs;
pub mod resolved_project;
pub mod cache_plan;
pub mod manifest;

View File

@ -1,14 +0,0 @@
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_language_api::SourcePolicy;
/// Fully resolved, language-aware description of one project in the graph.
#[derive(Debug, Clone)]
pub struct ProjectDescriptor {
    pub project_id: ProjectId,
    pub name: String,
    pub version: String,
    /// Canonicalized project root directory.
    pub project_dir: Utf8PathBuf,
    /// Source roots as declared in the manifest.
    pub source_roots: Vec<Utf8PathBuf>,
    pub language_id: String,
    /// File-extension policy resolved from the language registry.
    pub source_policy: SourcePolicy,
}

View File

@ -1,8 +0,0 @@
use prometeu_core::ProjectId;
use crate::model::loaded_file::LoadedFile;
/// All loaded files of one project.
#[derive(Debug, Clone)]
pub struct ProjectSources {
    pub project_id: ProjectId,
    /// Files sorted by uri (the loader sorts for deterministic builds).
    pub files: Vec<LoadedFile>,
}

View File

@ -1,16 +0,0 @@
use prometeu_core::ProjectId;
use crate::ProjectDescriptor;
/// Dependency graph over resolved projects.
///
/// Invariant: `projects` is an arena indexed by `ProjectId.0`
/// (materialize assigns ids in insertion order).
#[derive(Debug, Clone)]
pub struct ResolvedGraph {
    pub root: ProjectId,
    pub projects: Vec<ProjectDescriptor>, // arena
    // optional: adjacency list for checks
    pub edges: Vec<Vec<ProjectId>>, // edges[from] = vec[to]
}

impl ResolvedGraph {
    /// Looks up a project by id; `None` when the id is out of range.
    pub fn project(&self, id: &ProjectId) -> Option<&ProjectDescriptor> {
        self.projects.get(id.0 as usize)
    }
}

View File

@ -1,9 +0,0 @@
use prometeu_core::ProjectId;
use crate::{BuildStack, ResolvedGraph};
/// Final output of workspace resolution: graph plus deps-first build order.
#[derive(Debug, Clone)]
pub struct ResolvedWorkspace {
    /// Id of the root project (where resolution started).
    pub project_id: ProjectId,
    pub graph: ResolvedGraph,
    pub stack: BuildStack,
}

View File

@ -1,32 +0,0 @@
use anyhow::{Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use crate::workspace::model::DepRef;
/// IO boundary for the deps resolver; lets tests substitute the filesystem.
pub trait DepsHost {
    /// Reads a file fully into a `String`.
    fn read_to_string(&self, path: &Utf8Path) -> Result<String>;
    // fn ensure_project_local(&self, from_dir: &Utf8Path, dep: &DepRef) -> Result<Utf8PathBuf>;
}

/// Host backed by the real filesystem.
pub struct FsHost;

impl DepsHost for FsHost {
    fn read_to_string(&self, path: &Utf8Path) -> Result<String> {
        std::fs::read_to_string(path)
            .with_context(|| format!("failed to read {:?}", path))
    }

    // NOTE(review): sketch of a future localization hook; localization
    // currently happens inside the materialize phase instead.
    // fn ensure_project_local(&self, from_dir: &Utf8Path, dep: &DepRef) -> Result<Utf8PathBuf> {
    //     match dep {
    //         DepRef::Local { path } => {
    //             let joined = from_dir.join(path);
    //             let canon = joined.canonicalize()
    //                 .with_context(|| format!("deps: dep path does not exist: {:?}", joined))?;
    //             Utf8PathBuf::from_path_buf(canon)
    //                 .map_err(|p| anyhow::anyhow!("deps: non-utf8 dep dir: {:?}", p))
    //         }
    //         _ => unimplemented!(),
    //     }
    // }
}

View File

@ -1,6 +0,0 @@
mod resolve_workspace;
mod host;
mod model;
mod phases;
pub use resolve_workspace::resolve_workspace;

View File

@ -1,31 +0,0 @@
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_language_api::SourcePolicy;
use crate::Manifest;
/// Project as found on disk: directory + parsed manifest, before ids exist.
#[derive(Debug, Clone)]
pub struct RawProjectNode {
    pub dir: Utf8PathBuf,
    pub manifest_path: Utf8PathBuf,
    pub manifest: Manifest,
}

/// Dependency reference after localization: always a canonical local path
/// (git deps are mapped to local dirs via prometeu.lock during materialize).
#[derive(Debug, Clone)]
pub enum DepRef {
    Local {
        path: Utf8PathBuf
    },
}

/// Materialized project node in the resolver arena.
#[derive(Debug, Clone)]
pub struct ProjectNode {
    pub id: ProjectId,
    pub dir: Utf8PathBuf,
    pub name: String,
    pub version: String,
    pub source_roots: Vec<Utf8PathBuf>,
    pub language_id: String,
    pub deps: Vec<DepRef>,
    pub source_policy: SourcePolicy,
}

View File

@ -1,131 +0,0 @@
use crate::model::manifest::DepDecl;
use crate::workspace::host::DepsHost;
use crate::workspace::model::RawProjectNode;
use crate::workspace::phases::state::ResolverState;
use crate::Manifest;
use anyhow::{anyhow, bail, Context, Result};
use camino::Utf8PathBuf;
use serde_json;
use std::fs::canonicalize;
/// Phase 1: Discover all projects in the workspace.
///
/// - Reads `prometeu.json` from each pending project directory.
/// - Parses `Manifest`.
/// - Registers the raw node.
/// - Enqueues local-path deps for discovery (v0).
/// - Git deps must carry an explicit `rev` plus a `prometeu.lock` mapping;
///   the mapped local dir is enqueued like a local dep.
///
/// Does NOT:
/// - assign ProjectId
/// - build edges
/// - validate versions
pub fn discover(
    cfg: &crate::DepsConfig,
    host: &dyn DepsHost,
    state: &mut ResolverState,
) -> Result<()> {
    while let Some(canon_dir) = state.pending.pop_front() {
        // de-dup by directory: the same project may be reachable via
        // several dependency paths
        if state.raw_by_dir.contains_key(&canon_dir) {
            continue;
        }
        let manifest_path = canon_dir.join("prometeu.json");
        if !manifest_path.exists() || !manifest_path.is_file() {
            bail!(
                "deps: manifest not found: expected a file {:?} (project dir {:?})",
                manifest_path,
                canon_dir
            );
        }
        if cfg.explain {
            eprintln!("[deps][discover] reading {:?}", manifest_path);
        }
        let text = host
            .read_to_string(&manifest_path)
            .with_context(|| format!("deps: failed to read manifest {:?}", manifest_path))?;
        let manifest: Manifest = serde_json::from_str(&text)
            .with_context(|| format!("deps: invalid manifest JSON {:?}", manifest_path))?;
        // Register raw node
        let raw_idx = state.raw.len();
        state.raw.push(RawProjectNode {
            dir: canon_dir.clone(),
            manifest_path: manifest_path.clone(),
            manifest: manifest.clone(),
        });
        state.raw_by_dir.insert(canon_dir.clone(), raw_idx);
        for dep in &manifest.deps {
            match dep {
                DepDecl::Local { path } => {
                    // Canonicalize so de-dup works no matter how the
                    // relative path was spelled.
                    let dep_dir = canon_dir.join(path);
                    let dep_dir_std = dep_dir.canonicalize().with_context(|| {
                        format!(
                            "deps: dep path does not exist: {:?} (from {:?})",
                            dep_dir, canon_dir
                        )
                    })?;
                    let dep_dir_canon = Utf8PathBuf::from_path_buf(dep_dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 dep dir: {:?}", p))?;
                    if cfg.explain {
                        eprintln!("[deps][discover] local dep '{}' -> {:?}", path, dep_dir_canon);
                    }
                    state.pending.push_back(dep_dir_canon);
                }
                DepDecl::Git { git, rev } => {
                    let Some(rev) = rev.as_deref() else {
                        bail!(
                            "deps: git dependency '{}' requires an explicit 'rev' (commit hash) for now",
                            git
                        );
                    };
                    let Some(local_dir) = state.lock.lookup_git_local_dir(git, rev) else {
                        bail!(
                            "deps: git dependency requires prometeu.lock mapping, but entry not found: git='{}' rev='{}'",
                            git,
                            rev
                        );
                    };
                    // canonicalize the lock-provided local dir to keep identity stable
                    let local_dir_std = canonicalize(local_dir)
                        .with_context(|| format!("deps: prometeu.lock local_dir does not exist: {:?}", local_dir))?;
                    let local_dir_canon = Utf8PathBuf::from_path_buf(local_dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 lock local_dir: {:?}", p))?;
                    // validate manifest exists at the mapped project root
                    // (this check should not belong here, but it is ok)
                    let mapped_manifest = local_dir_canon.join("prometeu.json");
                    if !mapped_manifest.exists() || !mapped_manifest.is_file() {
                        bail!(
                            "deps: prometeu.lock maps git dep to {:?}, but manifest is missing: {:?}",
                            local_dir_canon,
                            mapped_manifest
                        );
                    }
                    if cfg.explain {
                        eprintln!(
                            "[deps][discover] git dep '{}' rev '{}' -> {:?}",
                            git, rev, local_dir_canon
                        );
                    }
                    state.pending.push_back(local_dir_canon);
                }
            }
        }
    }
    Ok(())
}

View File

@ -1,62 +0,0 @@
use anyhow::{Context, Result};
use prometeu_core::ProjectId;
use crate::workspace::model::DepRef;
use crate::workspace::phases::state::ResolverState;
/// Phase 3: Build graph edges from localized dependencies.
///
/// For each project node:
/// - For each `DepRef::Local { path }` (materialize has already reduced every
///   dependency, including git ones, to a canonical local directory):
///   - map the dep dir to a ProjectId via `state.by_dir`
///   - push `dep_id` onto `state.edges[from]`
///
/// Edges are sorted and deduped per node so the graph is deterministic.
pub fn localize(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    // Reset edges (allows re-run / deterministic behavior)
    for e in &mut state.edges {
        e.clear();
    }
    for from_idx in 0..state.nodes.len() {
        let from_id: ProjectId = state.nodes[from_idx].id;
        let from_dir = state.nodes[from_idx].dir.clone();
        if cfg.explain {
            eprintln!(
                "[deps][localize] from id={:?} dir={:?}",
                from_id, from_dir
            );
        }
        // Clone deps to avoid borrow conflicts (simple + safe for now)
        let deps = state.nodes[from_idx].deps.clone();
        for dep in deps {
            match &dep {
                DepRef::Local {
                    path
                } => {
                    // A dep dir that never went through discover means the
                    // graph is inconsistent; fail with a pointer to the cause.
                    let dep_id = state.by_dir.get(path).copied().with_context(|| {
                        format!(
                            "deps: localized dep dir {:?} was not discovered; \
                            ensure the dep has a prometeu.json and is reachable via local paths",
                            path
                        )
                    })?;
                    state.edges[from_id.0 as usize].push(dep_id);
                }
            }
        }
        // Keep edges deterministic: sorted by id, duplicates removed.
        state.edges[from_id.0 as usize].sort_by_key(|id| id.0);
        state.edges[from_id.0 as usize].dedup();
    }
    Ok(())
}

View File

@ -1,144 +0,0 @@
use crate::model::manifest::DepDecl;
use crate::workspace::model::{DepRef, ProjectNode};
use crate::workspace::phases::state::ResolverState;
use anyhow::{anyhow, bail, Context, Result};
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_languages_registry::get_language_spec;
use std::fs::canonicalize;
/// Phase 2: Materialize projects (allocate ProjectId / arena nodes).
///
/// Inputs:
/// - st.raw (RawProjectNode: dir + manifest)
///
/// Outputs:
/// - st.nodes (ProjectNode arena)
/// - st.by_dir (dir -> ProjectId)
/// - st.edges (allocated adjacency lists, empty for now)
/// - st.root (ProjectId for root_dir)
///
/// Does NOT:
/// - build graph edges (that's phase localize)
/// - validate version conflicts/cycles
pub fn materialize(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    // Reset materialized state (allows rerun in future refactors/tests)
    state.nodes.clear();
    state.by_dir.clear();
    state.edges.clear();
    state.root = None;
    state.nodes.reserve(state.raw.len());
    state.edges.reserve(state.raw.len());
    for (idx, raw) in state.raw.iter().enumerate() {
        // ProjectId is the arena index: id -> node lookup stays O(1).
        let id = ProjectId(idx as u32);
        // Source roots come straight from the manifest; a project with no
        // roots is rejected below.
        let source_roots: Vec<Utf8PathBuf> = raw
            .manifest
            .source_roots
            .iter()
            .map(|root| Utf8PathBuf::from(root))
            .collect();
        if source_roots.is_empty() {
            bail!(
                "deps: no source roots specified for project {}",
                raw.manifest.name
            )
        }
        // Convert DepDecl -> DepRef (no localization yet)
        let mut deps: Vec<DepRef> = Vec::with_capacity(raw.manifest.deps.len());
        for d in &raw.manifest.deps {
            match d {
                DepDecl::Local { path } => {
                    let joined = raw.dir.join(path);
                    let dir_std = joined.canonicalize()
                        .with_context(|| format!("deps: local dep path does not exist: {:?} (from {:?})", joined, raw.dir))?;
                    let dir_canon = Utf8PathBuf::from_path_buf(dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 dep dir: {:?}", p))?;
                    deps.push(DepRef::Local {
                        path: dir_canon
                    });
                }
                DepDecl::Git { git, rev } => {
                    let Some(rev) = rev.as_deref() else {
                        bail!(
                            "deps: git dependency '{}' requires an explicit 'rev' (commit hash) for now",
                            git
                        );
                    };
                    let Some(local_dir) = state.lock.lookup_git_local_dir(git, rev) else {
                        bail!(
                            "deps: git dependency requires prometeu.lock mapping, but entry not found: git='{}' rev='{}'",
                            git,
                            rev
                        );
                    };
                    // canonicalize the lock-provided local dir to keep identity stable
                    let path = canonicalize(local_dir).with_context(|| {
                        format!(
                            "deps: prometeu.lock local_dir does not exist: {:?}",
                            local_dir
                        )
                    })?;
                    let local_dir_canon = Utf8PathBuf::from_path_buf(path)
                        .map_err(|p| anyhow!("deps: non-utf8 lock local_dir: {:?}", p))?;
                    deps.push(DepRef::Local {
                        path: local_dir_canon,
                    });
                }
            }
        }
        if cfg.explain {
            eprintln!(
                "[deps][materialize] id={:?} {}@{} dir={:?} language={}",
                id, raw.manifest.name, raw.manifest.version, raw.dir, raw.manifest.language.id
            );
        }
        // Resolve the source policy now so later phases never see an
        // unknown language id.
        let source_policy = get_language_spec(raw.manifest.language.id.as_str())
            .map(|spec| spec.source_policy.clone())
            .ok_or(anyhow!(
                "deps: unknown language spec: {}",
                raw.manifest.language.id
            ))?;
        // Record node
        state.nodes.push(ProjectNode {
            id,
            dir: raw.dir.clone(),
            name: raw.manifest.name.clone(),
            version: raw.manifest.version.clone(),
            source_roots,
            language_id: raw.manifest.language.id.clone(),
            deps,
            source_policy,
        });
        state.by_dir.insert(raw.dir.clone(), id);
        state.edges.push(Vec::new());
    }
    // Determine root id
    if let Some(root_id) = state.by_dir.get(&state.root_dir).copied() {
        state.root = Some(root_id);
    } else {
        // This should never happen if seed/discover worked.
        // Keep it as a hard failure (in a later validate phase you can convert to a nicer diagnostic).
        anyhow::bail!(
            "deps: root project dir {:?} was not discovered/materialized",
            state.root_dir
        );
    }
    Ok(())
}

View File

@ -1,10 +0,0 @@
mod run_all;
mod state;
mod discover;
mod materialize;
mod localize;
mod validate;
mod policy;
mod stack;
pub use run_all::run_all;

View File

@ -1,17 +0,0 @@
use anyhow::{bail, Result};
use crate::workspace::phases::state::ResolverState;
/// Phase: enforce per-language source policies.
///
/// Fails on the first project whose resolved `SourcePolicy` has no file
/// extensions — such a project could never contribute any sources.
pub fn policy(_cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    match state
        .nodes
        .iter()
        .find(|node| node.source_policy.extensions.is_empty())
    {
        Some(node) => bail!(
            "deps: project {}@{} has empty source_policy.extensions (language={})",
            node.name,
            node.version,
            node.language_id
        ),
        None => Ok(()),
    }
}

View File

@ -1,50 +0,0 @@
use anyhow::{Context, Result};
use camino::Utf8Path;
use crate::{BuildStack, DepsConfig, ProjectDescriptor, ResolvedGraph, ResolvedWorkspace};
use crate::workspace::host::FsHost;
use crate::workspace::phases::{discover, localize, materialize, policy, stack, state, validate};
/// Runs every resolver phase in order and assembles the final workspace.
///
/// Phase order: seed -> discover -> materialize -> localize -> validate
/// -> policy -> stack.
pub fn run_all(cfg: &DepsConfig, fs_host: &FsHost, root_dir: &Utf8Path) -> Result<ResolvedWorkspace> {
    let mut st = state::seed(cfg, root_dir)?;
    discover::discover(cfg, fs_host, &mut st)?;
    materialize::materialize(cfg, &mut st)?;
    localize::localize(cfg, &mut st)?;
    validate::validate(cfg, &st)?;
    policy::policy(cfg, &mut st)?;
    let build_stack: BuildStack = stack::stack(cfg, &mut st)?;
    let root = st
        .root
        .context("deps: internal error: root ProjectId not set")?;
    // Build the arena expected by ResolvedGraph: index == ProjectId.0
    // materialize already assigns ProjectId(idx), so st.nodes order is stable.
    let mut projects: Vec<ProjectDescriptor> = Vec::with_capacity(st.nodes.len());
    for n in &st.nodes {
        projects.push(ProjectDescriptor {
            project_id: n.id,
            name: n.name.clone(),
            version: n.version.clone(),
            project_dir: n.dir.clone(),
            source_roots: n.source_roots.clone(),
            language_id: n.language_id.clone(),
            source_policy: n.source_policy.clone(),
        });
    }
    let graph = ResolvedGraph {
        root,
        projects,
        edges: st.edges,
    };
    Ok(ResolvedWorkspace {
        project_id: root,
        graph,
        stack: build_stack,
    })
}

View File

@ -1,97 +0,0 @@
use anyhow::{Context, Result};
use prometeu_core::ProjectId;
use std::collections::VecDeque;
use crate::BuildStack;
use crate::workspace::phases::state::ResolverState;
/// Phase: BuildStack (deps-first topo order) via Kahn's algorithm.
///
/// Output:
/// - returns a `BuildStack` whose projects list deps before dependents.
///
/// Determinism:
/// - ties are resolved by ProjectId order (stable across runs if discovery is stable).
pub fn stack(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<BuildStack> {
    let n = state.nodes.len();
    let _root = state.root.context("deps: internal error: root ProjectId not set")?;
    // Build indegree
    let mut indeg = vec![0usize; n];
    for outs in &state.edges {
        for &to in outs {
            indeg[to.0 as usize] += 1;
        }
    }
    // Deterministic queue: push in ProjectId order
    let mut q = VecDeque::new();
    for i in 0..n {
        if indeg[i] == 0 {
            q.push_back(i);
        }
    }
    let mut order: Vec<ProjectId> = Vec::with_capacity(n);
    while let Some(i) = q.pop_front() {
        order.push(ProjectId(i as u32));
        // Ensure deterministic traversal of outgoing edges too
        // (localize already sorts/dedups edges, but this doesn't hurt)
        for &to in &state.edges[i] {
            let j = to.0 as usize;
            indeg[j] -= 1;
            if indeg[j] == 0 {
                // Deterministic insert: keep queue ordered by ProjectId
                // Simple O(n) insertion is fine for now.
                insert_sorted_by_id(&mut q, j);
            }
        }
    }
    // If validate ran, this should already be cycle-free; still keep a guard.
    if order.len() != n {
        anyhow::bail!(
            "deps: internal error: stack generation did not visit all nodes ({} of {})",
            order.len(),
            n
        );
    }
    if cfg.explain {
        eprintln!("[deps][stack] build order:");
        for id in &order {
            let node = &state.nodes[id.0 as usize];
            eprintln!(" - {:?} {}@{} dir={:?}", id, node.name, node.version, node.dir);
        }
    }
    Ok(BuildStack {
        projects: order,
    })
}
/// Insert node index `i` into queue `q`, keeping `q` sorted ascending by
/// ProjectId (which equals the node index).
///
/// Precondition: `q` is already sorted ascending; afterwards it is still
/// sorted and contains `i`.
fn insert_sorted_by_id(q: &mut VecDeque<usize>, i: usize) {
    // First position whose element is strictly greater than `i`; inserting
    // there preserves ascending order (equal values land after their peers,
    // matching the original append fast-path).
    let pos = q.iter().position(|&v| i < v).unwrap_or(q.len());
    // The original drained and rebuilt the whole queue claiming "VecDeque has
    // no insert" — it does (`VecDeque::insert`), and it only shifts the
    // shorter side instead of copying everything.
    q.insert(pos, i);
}

View File

@ -1,58 +0,0 @@
use camino::{Utf8Path, Utf8PathBuf};
use std::collections::{HashMap, VecDeque};
use anyhow::Context;
use crate::workspace::model::{RawProjectNode, ProjectNode};
use prometeu_core::ProjectId;
use crate::PrometeuLock;
use serde_json;
/// Mutable state threaded through all resolver phases.
pub struct ResolverState {
    /// Canonicalized root project directory.
    pub root_dir: Utf8PathBuf,
    // phase1 output
    pub raw: Vec<RawProjectNode>,
    pub raw_by_dir: HashMap<Utf8PathBuf, usize>,
    /// Directories still waiting to be discovered.
    pub pending: VecDeque<Utf8PathBuf>,
    // phase2+
    pub nodes: Vec<ProjectNode>,
    pub by_dir: HashMap<Utf8PathBuf, ProjectId>,
    /// Adjacency lists indexed by ProjectId.0.
    pub edges: Vec<Vec<ProjectId>>,
    pub root: Option<ProjectId>,
    /// Parsed prometeu.lock (blank when the file is absent).
    pub lock: PrometeuLock,
}
/// Creates the initial resolver state: canonicalizes the root dir, loads
/// `prometeu.lock` if present (blank otherwise), and enqueues the root
/// directory for discovery.
pub fn seed(_cfg: &crate::DepsConfig, root_dir: &Utf8Path) -> anyhow::Result<ResolverState> {
    let path_buf = root_dir.canonicalize()?;
    let root_dir_canon = Utf8PathBuf::from_path_buf(path_buf)
        .map_err(|p| anyhow::anyhow!("deps: non-utf8 root dir: {:?}", p))?;
    let lock_path = root_dir_canon.join("prometeu.lock");
    let lock = if lock_path.exists() {
        let txt = std::fs::read_to_string(&lock_path)?;
        serde_json::from_str::<PrometeuLock>(&txt)
            .with_context(|| format!("invalid prometeu.lock at {:?}", lock_path))?
    } else {
        PrometeuLock::blank()
    };
    let mut pending = VecDeque::new();
    pending.push_back(root_dir_canon.clone());
    Ok(ResolverState {
        root_dir: root_dir_canon.clone(),
        raw: vec![],
        raw_by_dir: HashMap::new(),
        pending,
        nodes: vec![],
        by_dir: HashMap::new(),
        edges: vec![],
        root: None,
        lock,
    })
}

View File

@ -1,108 +0,0 @@
use anyhow::{bail, Context, Result};
use prometeu_core::ProjectId;
use std::collections::{HashMap, VecDeque};
use crate::workspace::phases::state::ResolverState;
/// Phase: Validate workspace graph & invariants (v0).
///
/// Checks:
/// - root present
/// - edges are in-range
/// - no cycles
/// - no version conflicts for same project name
pub fn validate(cfg: &crate::DepsConfig, state: &ResolverState) -> Result<()> {
    // 1) root present
    let root = state.root.context("deps: internal error: root ProjectId not set")?;
    if cfg.explain {
        eprintln!("[deps][validate] root={:?}", root);
    }
    // 2) edges sanity
    let n = state.nodes.len();
    for (from_idx, outs) in state.edges.iter().enumerate() {
        for &to in outs {
            let to_idx = to.0 as usize;
            if to_idx >= n {
                bail!(
                    "deps: invalid edge: from {:?} -> {:?} (to out of range; nodes={})",
                    ProjectId(from_idx as u32),
                    to,
                    n
                );
            }
        }
    }
    // 3) version conflicts by name
    // name -> (version -> ProjectId)
    let mut by_name: HashMap<&str, HashMap<&str, ProjectId>> = HashMap::new();
    for node in &state.nodes {
        let vmap = by_name.entry(node.name.as_str()).or_default();
        // First id seen for a version wins; only distinct versions matter below.
        vmap.entry(node.version.as_str()).or_insert(node.id);
    }
    for (name, versions) in &by_name {
        if versions.len() > 1 {
            // create deterministic message
            let mut vs: Vec<(&str, ProjectId)> = versions.iter().map(|(v, id)| (*v, *id)).collect();
            vs.sort_by(|a, b| a.0.cmp(b.0));
            let mut msg = format!("deps: version conflict for project '{}':", name);
            for (v, id) in vs {
                let dir = &state.nodes[id.0 as usize].dir;
                msg.push_str(&format!("\n - {} at {:?} (id={:?})", v, dir, id));
            }
            bail!(msg);
        }
    }
    // 4) cycle detection (Kahn + leftover nodes)
    // Build indegree
    let mut indeg = vec![0usize; n];
    for outs in &state.edges {
        for &to in outs {
            indeg[to.0 as usize] += 1;
        }
    }
    let mut q = VecDeque::new();
    for i in 0..n {
        if indeg[i] == 0 {
            q.push_back(i);
        }
    }
    let mut visited = 0usize;
    while let Some(i) = q.pop_front() {
        visited += 1;
        for &to in &state.edges[i] {
            let j = to.0 as usize;
            indeg[j] -= 1;
            if indeg[j] == 0 {
                q.push_back(j);
            }
        }
    }
    if visited != n {
        // Nodes with indeg>0 are part of cycles (or downstream of them)
        let mut cyclic: Vec<ProjectId> = Vec::new();
        for i in 0..n {
            if indeg[i] > 0 {
                cyclic.push(ProjectId(i as u32));
            }
        }
        // Deterministic error output
        cyclic.sort_by_key(|id| id.0);
        let mut msg = "deps: dependency cycle detected among:".to_string();
        for id in cyclic {
            let node = &state.nodes[id.0 as usize];
            msg.push_str(&format!("\n - {:?} {}@{} dir={:?}", id, node.name, node.version, node.dir));
        }
        bail!(msg);
    }
    Ok(())
}

View File

@ -1,10 +0,0 @@
use anyhow::Result;
use camino::Utf8Path;
use crate::{DepsConfig, ResolvedWorkspace};
use crate::workspace::host::FsHost;
/// Resolves the workspace rooted at `root_dir` using the real filesystem host.
pub fn resolve_workspace(cfg: &DepsConfig, root_dir: &Utf8Path) -> Result<ResolvedWorkspace> {
    let host = FsHost;
    crate::workspace::phases::run_all(cfg, &host, root_dir)
}

View File

@ -1,10 +0,0 @@
[package]
name = "prometeu-language-api"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = "Canonical language contract for Prometeu Backend: identifiers, references, and strict Frontend trait."
repository = "https://github.com/prometeu/runtime"
[dependencies]

View File

@ -1,21 +0,0 @@
/// Which files count as sources for a language.
#[derive(Debug, Clone)]
pub struct SourcePolicy {
    /// Accepted file extensions.
    pub extensions: Vec<&'static str>,
    /// Whether extension comparison is case sensitive.
    pub case_sensitive: bool,
}

impl SourcePolicy {
    /// Returns true when `ext` matches one of the allowed extensions,
    /// honoring `case_sensitive`.
    pub fn matches_ext(&self, ext: &str) -> bool {
        self.extensions.iter().any(|known| {
            if self.case_sensitive {
                *known == ext
            } else {
                known.eq_ignore_ascii_case(ext)
            }
        })
    }
}

/// A language's static spec: identifier plus its source policy.
#[derive(Debug, Clone)]
pub struct LanguageSpec {
    pub id: &'static str,
    pub source_policy: SourcePolicy,
}

View File

@ -1,3 +0,0 @@
mod language_spec;
pub use language_spec::*;

View File

@ -1,19 +0,0 @@
[package]
name = "prometeu-lowering"
version = "0.1.0"
edition = "2021"
license.workspace = true
repository.workspace = true
[dependencies]
prometeu-bytecode = { path = "../prometeu-bytecode" }
prometeu-core = { path = "../prometeu-core" }
prometeu-language-api = { path = "../prometeu-language-api" }
clap = { version = "4.5.54", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
anyhow = "1.0.100"
pathdiff = "0.2.1"
[dev-dependencies]
tempfile = "3.10.1"

View File

@ -0,0 +1,9 @@
// Frontend module build: shares the common Java library conventions.
plugins {
    id("gradle.java-library-conventions")
}

dependencies {
    // NOTE(review): presumably infra provides the ReadOnlySet/ReadOnlyList
    // utilities used by this module — confirm against :prometeu-infra sources.
    implementation(project(":prometeu-infra"))
    implementation(project(":prometeu-compiler:prometeu-compiler-core"))
}

View File

@ -0,0 +1,13 @@
package p.studio.compiler;
import p.studio.compiler.model.FrontendSpec;
import p.studio.utilities.structures.ReadOnlySet;
/**
 * Built-in frontend definition for the PBS language:
 * sources use the "pbs" extension and live under "src".
 */
public class PBSDefinitions {

    public static final FrontendSpec PBS = FrontendSpec
            .builder()
            .languageId("pbs")
            .allowedExtensions(ReadOnlySet.from("pbs"))
            .sourceRoots(ReadOnlySet.from("src"))
            .build();
}

View File

@ -0,0 +1,7 @@
// Compiler core module build.
plugins {
    id("gradle.java-library-conventions")
}

dependencies {
    // `api` (not `implementation`) so infra types appearing in this module's
    // public signatures are visible to consumers transitively.
    api(project(":prometeu-infra"))
}

View File

@ -0,0 +1,13 @@
package p.studio.compiler.model;
import lombok.Builder;
import lombok.Getter;
import p.studio.utilities.structures.ReadOnlySet;
/**
 * Static description of a language frontend: its id, which file extensions
 * it accepts, and its source-root directory names.
 */
@Builder
@Getter
public class FrontendSpec {

    private final String languageId;
    private final ReadOnlySet<String> allowedExtensions;
    private final ReadOnlySet<String> sourceRoots;
}

View File

@ -0,0 +1,34 @@
package p.studio.compiler.source;
import lombok.Getter;
import p.studio.compiler.source.identifiers.FileId;
/**
 * Half-open character range [start, end) within a single source file.
 * The constructor normalizes swapped bounds so start <= end always holds.
 */
@Getter
public class Span {

    private final FileId fileId;
    private final long start;
    private final long end; // exclusive

    public Span(FileId fileId, long start, long end) {
        this.fileId = fileId;
        // Accept the bounds in either order; store them normalized.
        if (start <= end) {
            this.start = start;
            this.end = end;
        } else {
            this.start = end;
            this.end = start;
        }
    }

    /** A span that points at no file: FileId.none() with an empty range. */
    public static Span none() {
        return new Span(FileId.none(), 0, 0);
    }

    public boolean isNone() {
        return fileId.isNone();
    }

    /** Number of characters covered (never negative, thanks to normalization). */
    public long length() {
        return end - start;
    }

    /** True when pos is non-negative and lies inside [start, end). */
    public boolean contains(long pos) {
        return pos >= 0L && start <= pos && pos < end;
    }
}

View File

@ -0,0 +1,29 @@
package p.studio.compiler.source.diagnostics;
import lombok.Getter;
import p.studio.compiler.source.Span;
import p.studio.utilities.structures.ReadOnlyList;
import java.util.List;
/**
 * One compiler diagnostic: severity, a diagnostic code, and a message,
 * anchored to a primary {@link Span} with optional related locations.
 */
@Getter
public class Diagnostic {

    private final Severity severity;
    private final String code;
    private final String message;
    private final Span span;
    /** Secondary locations giving context for the primary span. */
    private final ReadOnlyList<RelatedSpan> related;

    public Diagnostic(
            final Severity severity,
            final String code,
            final String message,
            final Span span,
            final List<RelatedSpan> related) {
        this.severity = severity;
        this.code = code;
        this.message = message;
        this.span = span;
        // NOTE(review): wrap presumably does not copy — callers must not
        // mutate `related` afterwards; confirm ReadOnlyList.wrap semantics.
        this.related = ReadOnlyList.wrap(related);
    }
}

View File

@ -0,0 +1,4 @@
package p.studio.compiler.source.diagnostics;
/**
 * Placeholder for a collection of diagnostics; no behavior implemented yet.
 */
public class DiagnosticBundle {
}

View File

@ -0,0 +1,15 @@
package p.studio.compiler.source.diagnostics;
import lombok.Getter;
import p.studio.compiler.source.Span;
/**
 * A secondary source location attached to a diagnostic, with its own
 * explanatory message.
 */
@Getter
public class RelatedSpan {

    private final String message;
    private final Span span;

    public RelatedSpan(String message, Span span) {
        this.message = message;
        this.span = span;
    }
}

View File

@ -0,0 +1,11 @@
package p.studio.compiler.source.diagnostics;
/**
 * Severity of a diagnostic.
 */
public enum Severity {
    Error,
    Warning,
    ;

    /**
     * True exactly when this severity is {@link #Error}.
     */
    public boolean isError() {
        return switch (this) {
            case Error -> true;
            case Warning -> false;
        };
    }
}

View File

@ -0,0 +1,24 @@
package p.studio.compiler.source.identifiers;
import lombok.EqualsAndHashCode;
import lombok.Getter;
/**
 * Base class for numeric identifiers used across the compiler
 * (files, modules, nodes, ...). Equality and hashing use only {@code id}.
 */
@Getter
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
abstract class AbstractSourceIdentifier {

    @EqualsAndHashCode.Include
    private final long id;

    AbstractSourceIdentifier(long id) {
        this.id = id;
    }

    /**
     * The id as an array index.
     * NOTE(review): narrowing cast silently truncates ids outside int range —
     * confirm callers only index with ids that fit in an int.
     */
    public int getIndex() {
        return (int) id;
    }

    @Override
    public String toString() {
        return String.format("%s(%d)", getClass().getSimpleName(), id);
    }
}

View File

@ -0,0 +1,17 @@
package p.studio.compiler.source.identifiers;
/** Identifier of a source file; {@link #NONE} is the "no file" sentinel. */
public class FileId extends AbstractSourceIdentifier {

    /** Shared sentinel instance; see {@link #isNone()}. */
    public static final FileId NONE = new FileId(-1L);

    public FileId(long id) {
        super(id);
    }

    public static FileId none() {
        return NONE;
    }

    /**
     * True only for the shared NONE instance — compares by reference,
     * so an independently constructed {@code new FileId(-1L)} is NOT "none".
     */
    public boolean isNone() {
        return this == NONE;
    }
}

View File

@ -0,0 +1,7 @@
package p.studio.compiler.source.identifiers;
/** Identifier of a module. */
public class ModuleId extends AbstractSourceIdentifier {

    public ModuleId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,7 @@
package p.studio.compiler.source.identifiers;
/** Identifier of an interned name. */
public class NameId extends AbstractSourceIdentifier {

    public NameId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,7 @@
package p.studio.compiler.source.identifiers;
/** Identifier of a syntax/semantic tree node. */
public class NodeId extends AbstractSourceIdentifier {

    public NodeId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,11 @@
package p.studio.compiler.source.identifiers;
/** Identifier of a project in the resolved workspace graph. */
public class ProjectId extends AbstractSourceIdentifier {

    /** Treats the int as unsigned: negative ints map to large positive ids. */
    public ProjectId(int id) {
        super(Integer.toUnsignedLong(id));
    }

    public ProjectId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,7 @@
package p.studio.compiler.source.identifiers;
/**
 * Identifier of a resolved symbol.
 *
 * The constructor parameter is widened from int to long for consistency with
 * the sibling identifiers (FileId, NodeId, TypeId, ...), which all accept
 * long; existing int call sites still compile via implicit widening.
 */
public class SymbolId extends AbstractSourceIdentifier {

    public SymbolId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,7 @@
package p.studio.compiler.source.identifiers;
/** Identifier of a type. */
public class TypeId extends AbstractSourceIdentifier {

    public TypeId(long id) {
        super(id);
    }
}

View File

@ -0,0 +1,10 @@
// Dependency-pipeline module build: shares the common Java library conventions.
plugins {
    id("gradle.java-library-conventions")
}

dependencies {
    implementation(project(":prometeu-infra"))
    implementation(project(":prometeu-compiler:prometeu-compiler-core"))
    implementation(project(":prometeu-compiler:prometeu-frontend-registry"))
    // Jackson (from the version catalog) for parsing manifest/lock JSON.
    implementation(libs.jackson.databind)
}

View File

@ -0,0 +1,11 @@
package p.studio.compiler.exceptions;
/**
 * Unchecked exception for failures raised by the build/dependency pipeline.
 */
public class BuildException extends RuntimeException {

    public BuildException(String message) {
        super(message);
    }

    public BuildException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@ -0,0 +1,9 @@
package p.studio.compiler.model;
import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.utilities.structures.ReadOnlyList;
import java.util.List;
/**
 * Deps-first build order: dependencies appear before their dependents.
 */
public record BuildStack(ReadOnlyList<ProjectId> projects) {
}

View File

@ -0,0 +1,11 @@
package p.studio.compiler.model;
import lombok.Builder;
import lombok.Getter;
/**
 * A problem captured during the build: a message plus an optional cause.
 */
@Builder
@Getter
public class BuildingIssue {

    private final String message;
    // May be null — the Lombok builder does not enforce a value.
    private final Throwable exception;
}

View File

@ -0,0 +1,10 @@
package p.studio.compiler.model;
import java.nio.file.Path;
import java.util.List;
/**
 * Configuration for the dependency pipeline.
 *
 * @param explain      when true, pipeline phases emit verbose resolution traces
 * @param cacheDir     directory used for dependency cache storage
 * @param registryDirs registry root directories
 */
public record DependencyPipelineConfig(
        boolean explain,
        Path cacheDir,
        List<Path> registryDirs) {

    public DependencyPipelineConfig {
        // Defensive, immutable copy so the record stays immutable even if the
        // caller keeps and mutates the original list (mirrors LoadedSources,
        // which already copies in its compact constructor).
        registryDirs = List.copyOf(registryDirs);
    }
}

View File

@ -0,0 +1,4 @@
package p.studio.compiler.model;
/**
 * A single source file already read into memory: uri plus full text.
 */
public record LoadedFile(String uri, String text) {
}

View File

@ -0,0 +1,17 @@
package p.studio.compiler.model;
import java.util.List;
/**
 * Sources already loaded by dependencies (IO happens in dependencies, not in pipeline).
 */
public record LoadedSources(
        /**
         * For each project in the stack, a list of files (uri + text).
         */
        List<ProjectSources> perProject
) {
    public LoadedSources {
        // Defensive, immutable copy; List.copyOf also rejects null lists/elements.
        perProject = List.copyOf(perProject);
    }
}

View File

@ -0,0 +1,21 @@
package p.studio.compiler.model;
import lombok.Builder;
import lombok.Getter;
import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.utilities.structures.ReadOnlyList;
import java.nio.file.Path;
import java.util.List;
/**
 * Fully resolved, language-aware description of one project in the graph.
 */
@Builder
@Getter
public class ProjectDescriptor {

    private final ProjectId projectId;
    private final String name;
    private final String version;
    /** Project root directory. */
    private final Path projectDir;
    /** Source-root directories of the project. */
    private final ReadOnlyList<Path> sourceRoots;
    private final String languageId;
    private final SourcePolicy sourcePolicy;
}

View File

@ -0,0 +1,9 @@
package p.studio.compiler.model;
import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.utilities.structures.ReadOnlyList;
/**
 * All loaded files of one project.
 */
public record ProjectSources(
        ProjectId projectId,
        ReadOnlyList<LoadedFile> files) {
}

View File

@ -0,0 +1,47 @@
package p.studio.compiler.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import java.util.List;
import java.util.Optional;
/**
 * Parsed {@code prometeu.lock}: pins non-local dependencies to local directories.
 *
 * NOTE(review): the Git mapping here deserializes JSON fields "url" and
 * "local-dir", while the previous Rust reader used "git" and "local_dir" —
 * confirm the lock file format intentionally changed, or existing lock files
 * will fail to resolve.
 */
public record PrometeuLock(
        long schema,
        List<LockMapping> mappings) {

    public PrometeuLock {
        // Defensive copy; a null mappings list is normalized to empty.
        mappings = mappings != null ? List.copyOf(mappings) : List.of();
    }

    /** An empty lock: schema 0, no mappings. */
    public static PrometeuLock blank() {
        return new PrometeuLock(0, List.of());
    }

    /**
     * Finds the local directory that this lock maps the given git URL and
     * revision to, if such an entry exists.
     */
    public Optional<String> lookupGitLocalDir(
            final String url,
            final String rev) {
        return mappings
                .stream()
                .filter(m -> m instanceof LockMapping.Git g && g.url().equals(url) && g.rev().equals(rev))
                .map(m -> ((LockMapping.Git) m).localDir())
                .findFirst();
    }

    /** One lock entry; polymorphic on the JSON "kind" property. */
    @JsonTypeInfo(
            use = JsonTypeInfo.Id.NAME,
            include = JsonTypeInfo.As.PROPERTY,
            property = "kind"
    )
    @JsonSubTypes({
            @JsonSubTypes.Type(value = LockMapping.Git.class, name = "git")
    })
    public interface LockMapping {

        /** Git dep pinned to a revision and materialized at {@code localDir}. */
        record Git(
                String url,
                String rev,
                @JsonProperty("local-dir") String localDir
        ) implements LockMapping {}
    }
}

View File

@ -0,0 +1,88 @@
package p.studio.compiler.model;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Getter;
import p.studio.compiler.FrontendRegistryService;
import p.studio.compiler.exceptions.BuildException;
import p.studio.utilities.structures.ReadOnlyList;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
 * Parsed representation of a prometeu project manifest.
 *
 * @param name         project name, or null when the manifest omits it
 * @param version      project version, or null when omitted
 * @param language     language id; defaults to the registry's default frontend language
 * @param dependencies declared dependencies (local paths and git urls)
 */
public record PrometeuManifest(
        String name,
        String version,
        String language,
        ReadOnlyList<DependencyDeclaration> dependencies) {

    /**
     * Reads and parses the manifest file at {@code path}.
     *
     * @param path   location of the manifest file
     * @param mapper Jackson mapper used to read the JSON tree
     * @return the parsed manifest
     * @throws BuildException when the file cannot be read or parsed
     */
    public static PrometeuManifest extract(final Path path, final ObjectMapper mapper) {
        try {
            final var root = mapper.readTree(path.toFile());
            final var name = text(root, "name");
            final var version = text(root, "version");
            final var language = Optional
                    .ofNullable(root.get("language"))
                    // An explicit JSON null node is non-null here and its asText()
                    // yields the literal string "null"; treat it as absent so the
                    // default-frontend fallback below still applies.
                    .filter(node -> !node.isNull())
                    .map(JsonNode::asText)
                    .orElseGet(() -> FrontendRegistryService.getDefaultFrontendSpec().getLanguageId());
            final List<PrometeuManifest.DependencyDeclaration> dependencies = new ArrayList<>();
            final var dependencyNodes = root.get("dependencies");
            if (dependencyNodes != null && dependencyNodes.isArray()) {
                for (final var d : dependencyNodes) {
                    if (d.has("path")) {
                        dependencies.add(new PrometeuManifest.DependencyDeclaration.Local(d.get("path").asText()));
                    } else if (d.has("url")) {
                        final var url = d.get("url").asText();
                        // asText(null) maps an explicit JSON null rev to null.
                        final var rev = d.has("rev") ? d.get("rev").asText(null) : null;
                        dependencies.add(new PrometeuManifest.DependencyDeclaration.Git(url, rev));
                    }
                    // NOTE(review): entries with neither "path" nor "url" are
                    // silently skipped — confirm this is intended rather than an error.
                }
            }
            return new PrometeuManifest(name, version, language, ReadOnlyList.wrap(dependencies));
        } catch (IOException e) {
            throw new BuildException("dependencies: failed to read or parse prometeu manifest " + path, e);
        }
    }

    /**
     * Returns the text of {@code field}, or null when the field is missing or an
     * explicit JSON null (NullNode.asText() would otherwise yield the string "null").
     */
    private static String text(final JsonNode root, final String field) {
        final JsonNode n = root.get(field);
        return n != null && !n.isNull() ? n.asText() : null;
    }

    /** A dependency entry; the concrete type is deduced from the properties present. */
    @JsonTypeInfo(use = JsonTypeInfo.Id.DEDUCTION)
    @JsonSubTypes({
            @JsonSubTypes.Type(value = DependencyDeclaration.Local.class),
            @JsonSubTypes.Type(value = DependencyDeclaration.Git.class)
    })
    public interface DependencyDeclaration {
        /** Dependency located on the local filesystem, identified by its path. */
        @Getter
        class Local implements DependencyDeclaration {
            private final String path;
            @JsonCreator
            public Local(@JsonProperty("path") final String path) {
                this.path = path;
            }
        }

        /** Dependency fetched from a git repository. */
        @JsonIgnoreProperties(ignoreUnknown = true)
        @Getter
        class Git implements DependencyDeclaration {
            // NOTE(review): the field is named "git" but holds the JSON "url" value
            // (getter is getGit()); renaming would break callers — consider for a
            // dedicated refactor.
            private final String git;
            private final String rev;
            @JsonCreator
            public Git(@JsonProperty("url") final String git, @JsonProperty("rev") final String rev) {
                this.git = git;
                this.rev = rev;
            }
        }
    }
}

View File

@ -0,0 +1,15 @@
package p.studio.compiler.model;
import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.utilities.structures.ReadOnlyList;
import java.util.List;
/**
 * The fully resolved project graph: every discovered project plus the
 * dependency edges between them.
 *
 * @param root     id of the main (entry) project
 * @param projects all project descriptors; NOTE(review): assumed indexed so that
 *                 projects.get(id.getIndex()) is that id's descriptor — confirm
 *                 against the resolver that assigns indices
 * @param edges    per-project dependency lists, parallel to {@code projects}
 */
public record ResolvedGraph(
ProjectId root,
ReadOnlyList<ProjectDescriptor> projects,
ReadOnlyList<ReadOnlyList<ProjectId>> edges) {
// Looks up a descriptor by its id's positional index.
public ProjectDescriptor project(ProjectId id) {
return projects.get(id.getIndex());
}
}

View File

@ -0,0 +1,9 @@
package p.studio.compiler.model;
import p.studio.compiler.source.identifiers.ProjectId;
/**
 * The end result of dependency resolution for a workspace.
 *
 * @param projectId id of the root project (same value as {@code graph.root()})
 * @param graph     the resolved project graph
 * @param stack     build ordering info; NOTE(review): presumably a topological
 *                  build order — confirm against BuildStack's producer
 */
public record ResolvedWorkspace(
ProjectId projectId,
ResolvedGraph graph,
BuildStack stack) {
}

View File

@ -0,0 +1,6 @@
package p.studio.compiler.model;
import p.studio.utilities.structures.ReadOnlyList;
/**
 * Rules for deciding which files count as sources for a project.
 *
 * @param extensions    accepted file extensions; NOTE(review): dot convention
 *                      (".x" vs "x") not visible here — confirm against the scanner
 * @param caseSensitive whether extension matching is case-sensitive
 */
public record SourcePolicy(ReadOnlyList<String> extensions, boolean caseSensitive) {
}

View File

@ -0,0 +1,71 @@
package p.studio.compiler.workspaces;
import p.studio.compiler.model.*;
import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.compiler.exceptions.BuildException;
import p.studio.utilities.structures.ReadOnlyList;
import java.nio.file.Path;
import java.util.*;
/**
 * Mutable working state threaded through the dependency-resolution pipeline.
 * Each phase reads and writes the public fields below; once resolution is done,
 * {@link #toResolvedWorkspace()} freezes the state into an immutable result.
 */
public final class DependencyPipelineContext {
    private final DependencyPipelineConfig config;

    // Internal state mirroring Rust ResolverState
    public Path mainProjectRootPathCanon;

    // Phase 1 (Discover)
    public final List<ProjectInfo> projectInfos = new ArrayList<>();
    public final Map<Path, Long> projectIndexByDirectory = new HashMap<>();
    public final Deque<Path> pending = new ArrayDeque<>();

    // Phase 2+
    public final List<ProjectNode> projectNodes = new ArrayList<>();
    public final Map<Path, ProjectId> projectIdByDirectoryRoot = new HashMap<>();
    public final List<List<ProjectId>> dependenciesByProject = new ArrayList<>();
    public ProjectId root;
    public BuildStack stack;

    private DependencyPipelineContext(DependencyPipelineConfig config) {
        this.config = config;
    }

    /** Creates a fresh, empty context for one pipeline run. */
    public static DependencyPipelineContext seed(DependencyPipelineConfig config) {
        return new DependencyPipelineContext(config);
    }

    /** The configuration this pipeline run was seeded with. */
    public DependencyPipelineConfig config() {
        return config;
    }

    /**
     * Freezes the accumulated state into an immutable {@link ResolvedWorkspace}.
     *
     * @return the resolved workspace built from {@code projectNodes},
     *         {@code dependenciesByProject}, {@code root} and {@code stack}
     * @throws BuildException when {@code root} was never assigned (internal error)
     */
    public ResolvedWorkspace toResolvedWorkspace() {
        if (root == null) {
            throw new BuildException("dependencies: internal error: root ProjectId not set");
        }
        final List<ProjectDescriptor> descriptors = new ArrayList<>(projectNodes.size());
        for (final var node : projectNodes) {
            // TODO: source policy should come from a frontend registry against the language id
            final var policy = new SourcePolicy(ReadOnlyList.empty(), true);
            descriptors.add(ProjectDescriptor
                    .builder()
                    .projectId(node.getProjectId())
                    .name(node.getName())
                    .version(node.getVersion())
                    .projectDir(node.getProjectRootPath())
                    .sourceRoots(node.getSourceRoots())
                    .languageId(node.getLanguageId())
                    .sourcePolicy(policy)
                    .build());
        }
        final List<ReadOnlyList<ProjectId>> edgeLists = new ArrayList<>(dependenciesByProject.size());
        for (final var dependencies : dependenciesByProject) {
            edgeLists.add(ReadOnlyList.wrap(dependencies));
        }
        final var graph = new ResolvedGraph(
                root,
                ReadOnlyList.wrap(descriptors),
                ReadOnlyList.wrap(edgeLists));
        // NOTE(review): unlike root, stack is not null-checked here — confirm a
        // null stack is a legal state for ResolvedWorkspace.
        return new ResolvedWorkspace(root, graph, stack);
    }
}

Some files were not shown because too many files have changed in this diff Show More