init hard reset, pipeline

This commit is contained in:
bQUARKz 2026-02-13 08:52:25 +00:00
parent 463f72a123
commit 2638b0fc88
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
37 changed files with 649 additions and 122 deletions

21
Cargo.lock generated
View File

@ -156,9 +156,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.100"
version = "1.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
[[package]]
name = "arrayref"
@ -325,6 +325,12 @@ dependencies = [
"wayland-client",
]
[[package]]
name = "camino"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
[[package]]
name = "cc"
version = "1.2.52"
@ -1878,13 +1884,10 @@ version = "0.1.0"
dependencies = [
"anyhow",
"clap",
"pathdiff",
"prometeu-bytecode",
"prometeu-core",
"prometeu-language-api",
"prometeu-deps",
"serde",
"serde_json",
"tempfile",
]
[[package]]
@ -1917,6 +1920,8 @@ dependencies = [
name = "prometeu-deps"
version = "0.1.0"
dependencies = [
"anyhow",
"camino",
"prometeu-core",
"serde",
]
@ -2566,9 +2571,9 @@ dependencies = [
[[package]]
name = "toml_parser"
version = "1.0.6+spec-1.1.0"
version = "1.0.8+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc"
dependencies = [
"winnow",
]

View File

@ -14,14 +14,9 @@ dist = true
include = ["../../VERSION.txt"]
[dependencies]
prometeu-bytecode = { path = "../prometeu-bytecode" }
prometeu-deps = { path = "../prometeu-deps" }
prometeu-core = { path = "../prometeu-core" }
prometeu-language-api = { path = "../prometeu-language-api" }
clap = { version = "4.5.54", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
anyhow = "1.0.100"
pathdiff = "0.2.1"
[dev-dependencies]
tempfile = "3.10.1"

View File

@ -1,13 +1,15 @@
use anyhow::Result;
use crate::{BuildMode, PipelineConfig, PipelineInput, PipelineOutput};
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use prometeu_deps::{load_sources, prepare_caches, resolve_project, DepsConfig};
use crate::pipeline::run_phases;
/// Command line interface for the Prometeu Compiler.
#[derive(Parser)]
#[command(name = "prometeu-build-pipeline")]
#[command(version, about = "Official compiler for the PROMETEU Virtual Machine", long_about = None)]
#[command(name = "prometeu")]
#[command(version, about = "PROMETEU toolchain entrypoint", long_about = None)]
pub struct Cli {
/// The action to perform (build or verify).
#[command(subcommand)]
pub command: Commands,
}
@ -15,16 +17,13 @@ pub struct Cli {
/// Available subcommands for the compiler.
#[derive(Subcommand)]
pub enum Commands {
/// Builds a Prometeu project by compiling source code into a PBC file.
/// Builds a Prometeu project by compiling source code into an artifact (pbc/program image).
Build {
/// Path to the project root directory.
project_dir: PathBuf,
/// Explicit path to the entry file (defaults to src/main.pbs).
#[arg(short, long)]
entry: Option<PathBuf>,
/// Path to save the compiled .pbc file.
/// Path to save the compiled artifact.
/// If omitted, deps/pipeline decide a default under target/ or dist/.
#[arg(short, long)]
out: Option<PathBuf>,
@ -32,25 +31,21 @@ pub enum Commands {
#[arg(long, default_value_t = true)]
emit_symbols: bool,
/// Disable symbol generation.
#[arg(long)]
no_symbols: bool,
/// Whether to generate a .disasm file for debugging.
#[arg(long, default_value_t = true)]
emit_disasm: bool,
/// Disable disassembly generation.
#[arg(long)]
no_disasm: bool,
/// Whether to explain the dependency resolution process.
#[arg(long)]
explain_deps: bool,
/// Build mode (debug/release).
#[arg(long, default_value = "debug")]
mode: String,
},
/// Verifies if a Prometeu project is syntactically and semantically valid without emitting code.
/// Verifies if a Prometeu project is valid without emitting code.
Verify {
/// Path to the project root directory.
project_dir: PathBuf,
/// Whether to explain the dependency resolution process.
@ -59,8 +54,6 @@ pub enum Commands {
},
}
/// Main entry point for the compiler library's execution logic.
/// Parses CLI arguments and dispatches to the appropriate compiler functions.
pub fn run() -> Result<()> {
let cli = Cli::parse();
@ -69,20 +62,126 @@ pub fn run() -> Result<()> {
project_dir,
out,
emit_disasm,
no_disasm,
emit_symbols,
no_symbols,
explain_deps,
..
mode,
} => {
let build_mode = parse_mode(&mode)?;
let cfg = PipelineConfig {
mode: build_mode,
enable_cache: true,
enable_frontends: false,
};
let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
.context("pipeline: failed to execute pipeline")?;
for diagnostics in &pipeline_output.diagnostics {
eprintln!("{:?}", diagnostics);
}
let emit_opts = EmitOptions {
out,
emit_symbols,
emit_disasm,
};
emit_artifacts(&emit_opts, &pipeline_output)
.context("emit: failed to write artifacts")?;
if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
anyhow::bail!("build failed due to errors");
}
}
Commands::Verify {
project_dir,
explain_deps
explain_deps,
} => {
let cfg = PipelineConfig {
mode: BuildMode::Test,
enable_cache: true,
enable_frontends: false,
};
let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
.context("pipeline: failed to execute pipeline")?;
for diagnostic in &pipeline_output.diagnostics {
eprintln!("{:?}", diagnostic);
}
if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
anyhow::bail!("verify failed due to errors");
}
}
}
Ok(())
}
}
/// Run dependency resolution and the phase pipeline for `project_dir`.
///
/// All filesystem IO happens inside `prometeu-deps` (`resolve_project`,
/// `prepare_caches`, `load_sources`); the phase pipeline itself only works
/// on in-memory data.
fn run_pipeline(cfg: PipelineConfig, project_dir: &Path, explain_deps: bool) -> anyhow::Result<PipelineOutput> {
    // Deps-side configuration; cache/registry locations fall back to defaults for now.
    let deps_cfg = DepsConfig {
        explain: explain_deps,
        cache_dir: Default::default(),
        registry_dirs: vec![],
    };
    let resolved = resolve_project(&deps_cfg, project_dir)
        .with_context(|| format!("deps: failed to resolve project at {:?}", project_dir))?;
    // `resolved` must provide at least:
    // - graph
    // - stack (deps-first topo order)
    // - (optional) caches already validated
    let caches = if cfg.enable_cache {
        Some(
            prepare_caches(&deps_cfg, &resolved.cache_plan)
                .context("deps: failed to prepare caches")?
        )
    } else {
        None
    };
    let sources = load_sources(&deps_cfg, &resolved)
        .context("deps: failed to load sources")?;
    // Hand everything to the pure, in-memory phase runner.
    let input = PipelineInput {
        graph: resolved.graph,
        stack: resolved.stack,
        sources,
        caches,
    };
    Ok(run_phases(cfg, input))
}
/// Parse the `--mode` CLI value into a [`BuildMode`].
///
/// Accepts `debug`, `release` and `test`, case-insensitively; anything else
/// is reported as an error naming the accepted values.
fn parse_mode(s: &str) -> Result<BuildMode> {
    let lowered = s.to_ascii_lowercase();
    if lowered == "debug" {
        Ok(BuildMode::Debug)
    } else if lowered == "release" {
        Ok(BuildMode::Release)
    } else if lowered == "test" {
        Ok(BuildMode::Test)
    } else {
        anyhow::bail!("invalid --mode '{}': expected debug|release|test", lowered)
    }
}
/// Emission options
///
/// Collected from the CLI `Build` flags in `run()` and consumed by
/// `emit_artifacts`.
struct EmitOptions {
    // Explicit output path; `None` lets deps/pipeline pick a default location.
    out: Option<PathBuf>,
    // Whether to write a symbols file alongside the artifact.
    emit_symbols: bool,
    // Whether to write a disassembly listing for debugging.
    emit_disasm: bool,
}
/// Placeholder emit function.
/// In hard reset, this can be a no-op until backend exists.
///
/// Always returns `Ok(())` for now; the parameters are kept so the call site
/// in `run()` already has its final shape.
fn emit_artifacts(_opts: &EmitOptions, _outp: &PipelineOutput) -> Result<()> {
    // Later:
    // - decide output dir (opts.out or default)
    // - write .pbc / program image
    // - write symbols.json (if exists)
    // - write disasm (if exists)
    Ok(())
}

View File

@ -0,0 +1,23 @@
/// Build profile selected via `--mode` on the CLI.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BuildMode {
    Debug,
    Release,
    Test,
}

/// Switches controlling which pipeline phases actually do work.
#[derive(Debug, Clone)]
pub struct PipelineConfig {
    /// Selected build profile.
    pub mode: BuildMode,
    /// When false, the cache phase is skipped entirely.
    pub enable_cache: bool,
    /// When false, the frontend phase is skipped entirely.
    pub enable_frontends: bool,
}

impl Default for PipelineConfig {
    /// Debug profile with caching on and no frontends.
    fn default() -> Self {
        PipelineConfig {
            mode: BuildMode::Debug,
            enable_frontends: false, // Hard Reset default: pipeline runs with no FE.
            enable_cache: true,
        }
    }
}

View File

@ -0,0 +1,75 @@
use std::any::Any;
use prometeu_core::{Diagnostic, FileDB, FileId, NameInterner, ProjectId};
use prometeu_deps::BuildStack;
/// Per-project arena slot created from the BuildStack order.
/// The pipeline owns this vector and indexes it by stack position.
#[derive(Debug)]
pub struct ProjectCtx {
    /// Id of the project this slot belongs to; matches
    /// `ProjectDescriptor::project_id` at the same stack index.
    pub project_id: ProjectId,
    /// FileIds inserted into `source_db` for this project.
    pub files: Vec<FileId>,
    /// Frontend output (TypedHIRBundle or similar) - intentionally opaque.
    pub frontend_out: Option<Box<dyn Any>>,
    /// Backend output (ProgramImage / BytecodeModule / Artifact).
    /// Keep as opaque until you finalize your bytecode/image crate.
    pub backend_out: Option<Box<dyn Any>>,
}
impl ProjectCtx {
    /// Fresh slot for `project_id`: no files loaded, no frontend or backend
    /// output produced yet.
    pub fn new(project_id: ProjectId) -> Self {
        Self {
            project_id,
            files: Vec::new(),
            frontend_out: None,
            backend_out: None,
        }
    }
}
/// Pipeline context (in-memory state).
/// Arena-friendly: uses Vec + IDs as the main storage.
#[derive(Debug)]
pub struct PipelineCtx {
    /// All loaded source files, keyed by `FileId`.
    pub source_db: FileDB,
    /// Name interner shared across phases (semantics live in `prometeu_core`).
    pub interner: NameInterner,
    /// Diagnostics accumulated by every phase, in emission order.
    pub diagnostics: Vec<Diagnostic>,
    /// Projects in stack order (deps first).
    pub projects: Vec<ProjectCtx>,
}
impl PipelineCtx {
    /// Create an empty context: no files, no diagnostics, no project slots.
    pub fn new() -> Self {
        Self {
            source_db: FileDB::new(),
            interner: NameInterner::new(),
            diagnostics: Vec::new(),
            projects: Vec::new(),
        }
    }

    /// Record a diagnostic; severity filtering happens at the CLI boundary.
    pub fn push_diagnostic(&mut self, d: Diagnostic) {
        self.diagnostics.push(d);
    }

    /// Initialize per-project contexts from the BuildStack order.
    /// Replaces any previously initialized slots.
    pub fn init_projects_from_stack(&mut self, stack: &BuildStack) {
        self.projects = stack
            .projects
            .iter()
            .map(|p| ProjectCtx::new(p.project_id))
            .collect();
    }

    /// Mutable access to a project slot by its position in the build stack.
    /// Panics if `index_in_stack` is out of bounds (a pipeline bug).
    pub fn project_ctx_mut(&mut self, index_in_stack: usize) -> &mut ProjectCtx {
        &mut self.projects[index_in_stack]
    }

    /// Shared access to a project slot by its position in the build stack.
    /// Panics if `index_in_stack` is out of bounds (a pipeline bug).
    pub fn project_ctx(&self, index_in_stack: usize) -> &ProjectCtx {
        &self.projects[index_in_stack]
    }
}

// `new` takes no arguments, so expose it through `Default` as well
// (clippy::new_without_default).
impl Default for PipelineCtx {
    fn default() -> Self {
        Self::new()
    }
}

View File

@ -1,3 +1,11 @@
mod cli;
pub mod cli;
pub mod config;
pub mod ctx;
pub mod pipeline;
pub mod phases;
pub use config::*;
pub use ctx::*;
pub use pipeline::*;
pub use cli::run;

View File

@ -0,0 +1,6 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};

/// Backend phase — intentionally a no-op during the hard reset.
///
/// Later: consume the per-project frontend output (TypedHIRBundle or similar)
/// and lower it into a ProgramImage/BytecodeModule stored in
/// `ProjectCtx::backend_out`.
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {
    // Hard Reset stub:
    // - later: consume TypedHIRBundle(s) and lower into ProgramImage/BytecodeModule.
}

View File

@ -0,0 +1,12 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};

/// Boot phase: create one `ProjectCtx` slot per project, in `BuildStack`
/// (deps-first) order.
///
/// Pure arena initialization — no filesystem access and no frontend/backend
/// assumptions belong here.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    ctx.init_projects_from_stack(&input.stack);
}

View File

@ -0,0 +1,13 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Cache phase: decide when deps-validated cache blobs get hydrated/used.
///
/// Hard Reset stub — actual cache IO and validity checking stays in `deps`;
/// the pipeline only chooses *when* to consume the blobs.
pub fn run(cfg: &PipelineConfig, input: &PipelineInput, _ctx: &mut PipelineCtx) {
    if cfg.enable_cache {
        // `input.caches` may carry blobs already validated by deps;
        // nothing is hydrated yet.
        let _ = &input.caches;
    }
}

View File

@ -0,0 +1,7 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::{Artifacts, PipelineInput}};
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) -> Artifacts {
// Hard Reset stub:
// - later: emit build outputs (to FS via deps if you want strict IO centralization).
Artifacts::default()
}

View File

@ -0,0 +1,11 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Frontend phase — opt-in; the hard-reset default config disables it.
pub fn run(cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {
    if !cfg.enable_frontends {
        return;
    }
    // Hard Reset:
    // - no FE wired yet.
    // - later: iterate projects in stack order and call FE plugin(s).
}

View File

@ -0,0 +1,116 @@
use prometeu_core::{Diagnostic, Severity, Span};
use prometeu_deps::LoadedSources;
use crate::{
config::PipelineConfig,
ctx::PipelineCtx,
pipeline::PipelineInput,
};
/// Load-source phase: hydrate the `FileDB` from deps-provided sources, then
/// warn about any project that ended up with no files.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    load_sources(&input.sources, ctx);

    // Collect the empty slots first: pushing a diagnostic needs `&mut ctx`,
    // which cannot overlap with iterating `ctx.projects`.
    let empty: Vec<usize> = ctx
        .projects
        .iter()
        .enumerate()
        .filter_map(|(i, p)| p.files.is_empty().then_some(i))
        .collect();

    for i in empty {
        let proj = &input.stack.projects[i];
        ctx.push_diagnostic(Diagnostic {
            severity: Severity::Warning,
            code: "PIPELINE_NO_SOURCES".into(),
            message: format!(
                "Project '{}' has no source files loaded.",
                proj.name
            ),
            span: Span::none(),
            related: vec![],
        });
    }
}
/// Copy deps-loaded sources into the pipeline `FileDB`, aligning
/// `LoadedSources.per_project` with the `ProjectCtx` arena by stack index.
///
/// Any misalignment (length or project-id mismatch) is a deps bug; it is
/// reported as error diagnostics instead of panicking so that as much
/// information as possible reaches the user before the build fails.
fn load_sources(sources: &LoadedSources, ctx: &mut PipelineCtx) {
    let stack_len = ctx.projects.len();
    let src_len = sources.per_project.len();

    // 1) Diagnostic if sizes don't match.
    if src_len != stack_len {
        ctx.push_diagnostic(Diagnostic {
            severity: Severity::Error,
            code: "PIPELINE_SOURCES_STACK_LEN_MISMATCH".into(),
            message: format!(
                "LoadedSources.per_project len ({}) does not match BuildStack len ({}).",
                src_len, stack_len
            ),
            span: Span::none(),
            related: vec![],
        });
    }

    // 2) Process the overlapping prefix (don't panic, just keep running with diagnostics).
    let n = stack_len.min(src_len);
    for i in 0..n {
        let expected = ctx.projects[i].project_id;
        let got = sources.per_project[i].project_id;
        if got != expected {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_STACK_ORDER_MISMATCH".into(),
                message: format!(
                    "LoadedSources is not aligned with BuildStack at index {}: expected project_id {:?}, got {:?}.",
                    i, expected, got
                ),
                span: Span::none(),
                related: vec![],
            });
            // No fix-up tolerance here: if the order is wrong, it is wrong.
            // Skip the slot but keep scanning to surface every mismatch.
            continue;
        }
        for f in &sources.per_project[i].files {
            let file_id = ctx.source_db.upsert(&f.uri, &f.text);
            ctx.projects[i].files.push(file_id);
        }
    }

    // 3) Extra source entries beyond the stack: a deps bug.
    if src_len > stack_len {
        for extra in &sources.per_project[stack_len..] {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_EXTRA_PROJECT".into(),
                message: format!(
                    "LoadedSources contains extra project_id {:?} not present in BuildStack.",
                    extra.project_id
                ),
                span: Span::none(),
                related: vec![],
            });
        }
    }

    // 4) Stack projects with no source entry: another deps bug.
    if stack_len > src_len {
        // Collect first: `ctx.projects` is borrowed while building the
        // messages, so they cannot be pushed into `ctx.diagnostics` in the
        // same loop.
        let missing: Vec<Diagnostic> = ctx.projects[src_len..]
            .iter()
            .map(|p| Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_MISSING_PROJECT".into(),
                message: format!(
                    "LoadedSources missing sources for project_id {:?} present in BuildStack.",
                    p.project_id
                ),
                span: Span::none(),
                related: vec![],
            })
            .collect();
        ctx.diagnostics.extend(missing);
    }
}

View File

@ -0,0 +1,6 @@
// Pipeline phases, listed in the order `pipeline::run_phases` executes them.
pub mod boot;
pub mod load_source;
pub mod cache;
pub mod frontend;
pub mod backend;
pub mod emit;

View File

@ -0,0 +1,65 @@
use std::path::Path;
use anyhow::Context;
use prometeu_core::Diagnostic;
use prometeu_deps::{load_sources, prepare_caches, resolve_project, BuildStack, CacheBlobs, DepsConfig, LoadedSources, ResolvedGraph};
use crate::{config::PipelineConfig, ctx::PipelineCtx, phases};
/// Everything the phase pipeline consumes, produced entirely by
/// `prometeu-deps` — the pipeline itself performs no filesystem IO.
#[derive(Debug, Clone)]
pub struct PipelineInput {
    /// Resolved dependency graph (arena of projects + edges).
    pub graph: ResolvedGraph,
    /// Deps-first topological build order over the same projects.
    pub stack: BuildStack,
    /// File contents per project; expected to be index-aligned with `stack`
    /// (the load_source phase verifies this and diagnoses mismatches).
    pub sources: LoadedSources,
    /// Cache blobs validated by deps; `None` when caching is disabled.
    pub caches: Option<CacheBlobs>,
}
/// Cheap counters gathered after all phases have run.
#[derive(Debug, Default, Clone)]
pub struct PipelineStats {
    /// Number of project slots created from the build stack.
    pub projects_count: usize,
    /// Total files loaded into the FileDB across all projects.
    pub files_count: usize,
}
/// Build outputs produced by the emit phase.
#[derive(Debug, Default, Clone)]
pub struct Artifacts {
    // placeholder: later include produced ProgramImage(s), debug bundles, logs, etc.
}
/// Result of a full pipeline run.
#[derive(Debug, Default)]
pub struct PipelineOutput {
    /// All diagnostics collected across phases (errors and warnings).
    pub diagnostics: Vec<Diagnostic>,
    /// Build outputs (placeholder until the backend/emit phases are real).
    pub artifacts: Artifacts,
    /// Basic run statistics.
    pub stats: PipelineStats,
}
pub(crate) fn run_phases(cfg: PipelineConfig, input: PipelineInput) -> PipelineOutput {
let mut ctx = PipelineCtx::new();
// Boot: create project slots in arena order.
phases::boot::run(&cfg, &input, &mut ctx);
// Load source: populate FileDB from LoadedSources.
phases::load_source::run(&cfg, &input, &mut ctx);
// Cache hydrate (stub for now).
phases::cache::run(&cfg, &input, &mut ctx);
// Frontend phase (stub / optional).
phases::frontend::run(&cfg, &input, &mut ctx);
// Backend phase (stub).
phases::backend::run(&cfg, &input, &mut ctx);
// Emit phase (stub).
let artifacts = phases::emit::run(&cfg, &input, &mut ctx);
// Stats (basic).
let mut stats = PipelineStats::default();
stats.projects_count = ctx.projects.len();
stats.files_count = ctx.projects.iter().map(|p| p.files.len()).sum();
PipelineOutput {
diagnostics: ctx.diagnostics,
artifacts,
stats,
}
}

View File

@ -7,6 +7,15 @@ pub enum Severity {
Warning,
}
impl Severity {
pub fn is_error(&self) -> bool {
match self {
Severity::Error => true,
Severity::Warning => false,
}
}
}
impl Serialize for Severity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where

View File

@ -2,12 +2,13 @@ use std::collections::HashMap;
use crate::FileId;
use crate::LineIndex;
#[derive(Default)]
#[derive(Default, Debug)]
pub struct FileDB {
files: Vec<FileData>,
uri_to_id: HashMap<String, FileId>,
}
#[derive(Debug)]
struct FileData {
uri: String,
text: String,
@ -22,12 +23,12 @@ impl FileDB {
}
}
pub fn upsert(&mut self, uri: &str, text: String) -> FileId {
pub fn upsert(&mut self, uri: &str, text: &str) -> FileId {
if let Some(&id) = self.uri_to_id.get(uri) {
let line_index = LineIndex::new(&text);
self.files[id.0 as usize] = FileData {
uri: uri.to_string(),
text,
uri: uri.to_owned(),
text: text.to_owned(),
line_index,
};
id
@ -35,8 +36,8 @@ impl FileDB {
let id = FileId(self.files.len() as u32);
let line_index = LineIndex::new(&text);
self.files.push(FileData {
uri: uri.to_string(),
text,
uri: uri.to_owned(),
text: text.to_owned(),
line_index,
});
self.uri_to_id.insert(uri.to_string(), id);

View File

@ -5,14 +5,15 @@ macro_rules! define_id {
pub struct $name(pub u32);
impl $name {
pub const INVALID: $name = $name(u32::MAX);
pub const NONE: $name = $name(u32::MAX);
#[inline]
pub const fn as_u32(self) -> u32 { self.0 }
// Temporary helper for places that still index Vec/slots by usize
#[inline]
pub const fn as_usize(self) -> usize { self.0 as usize }
pub fn is_none(self) -> bool {
self == $name::NONE
}
}
impl From<u32> for $name {

View File

@ -1,3 +1,4 @@
#[derive(Debug)]
pub struct LineIndex {
line_starts: Vec<u32>,
total_len: u32,

View File

@ -13,6 +13,20 @@ impl Span {
Self { file, start, end }
}
#[inline]
pub fn none() -> Self {
Self {
file: FileId::NONE,
start: 0,
end: 0,
}
}
#[inline]
pub fn is_none(&self) -> bool {
self.file.is_none()
}
#[inline]
pub fn len(&self) -> u32 {
self.end.saturating_sub(self.start)

View File

@ -8,6 +8,8 @@ description = ""
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
prometeu-core = { path = "../prometeu-core" }
anyhow = "1.0.101"
camino = "1.2.2"
[features]
default = []

View File

@ -1 +1,21 @@
mod project_registry;
// Internal modules; the public surface is re-exported below.
mod model;
mod resolve_project;
mod load_sources;
mod prepare_caches;
// Entry points — all filesystem IO for the build lives behind these.
pub use resolve_project::resolve_project;
pub use prepare_caches::prepare_caches;
pub use load_sources::load_sources;
// Flat data-model re-exports so consumers can `use prometeu_deps::{...}`.
pub use model::resolved_project::ResolvedProject;
pub use model::resolved_explanation::ResolveExplanation;
pub use model::deps_config::DepsConfig;
pub use model::project_descriptor::ProjectDescriptor;
pub use model::build_stack::BuildStack;
pub use model::resolved_graph::ResolvedGraph;
pub use model::loaded_sources::LoadedSources;
pub use model::project_sources::ProjectSources;
pub use model::loaded_file::LoadedFile;
pub use model::cache_blobs::CacheBlobs;
pub use model::cache_plan::CachePlan;

View File

@ -0,0 +1,5 @@
use crate::{DepsConfig, LoadedSources, ResolvedProject};

/// Load every source file for each project in the resolved build stack.
///
/// All filesystem IO stays inside deps; the pipeline only consumes the
/// result. The returned `per_project` entries must be index-aligned with
/// `ResolvedProject::stack`.
///
/// TODO(hard-reset): not implemented yet.
pub fn load_sources(_cfg: &DepsConfig, _resolved: &ResolvedProject) -> anyhow::Result<LoadedSources> {
    todo!("deps::load_sources: read project sources from disk")
}

View File

@ -0,0 +1,7 @@
use crate::model::project_descriptor::ProjectDescriptor;

/// Topologically ordered build plan: dependencies appear before dependents.
#[derive(Debug, Clone)]
pub struct BuildStack {
    /// deps-first order
    pub projects: Vec<ProjectDescriptor>,
}

View File

@ -0,0 +1,7 @@
/// Cache blobs computed/validated by deps.
/// The pipeline may decide when to store, but deps executes IO and cache validity.
#[derive(Debug, Clone)]
pub struct CacheBlobs {
    // placeholder — keeps the struct constructible without committing to a
    // blob layout yet
    pub _unused: (),
}

View File

@ -0,0 +1,4 @@
/// Input to `prepare_caches` (carried on `ResolvedProject::cache_plan`).
/// Placeholder: fields TBD during the hard reset.
#[derive(Debug, Clone)]
pub struct CachePlan {
}

View File

@ -0,0 +1,8 @@
use camino::Utf8PathBuf;

/// Configuration for dependency resolution.
///
/// Directory layout and cache policy are deps-internal concerns; the
/// pipeline never interprets these paths.
#[derive(Debug, Clone, Default)]
pub struct DepsConfig {
    /// Emit a resolution explanation alongside the result.
    pub explain: bool,
    /// Root directory for on-disk caches (policy understood only by deps).
    pub cache_dir: Utf8PathBuf,
    /// Registries (or source dirs) searched during resolution.
    pub registry_dirs: Vec<Utf8PathBuf>,
}

View File

@ -0,0 +1,5 @@
/// A single source file already read from disk by deps.
#[derive(Debug, Clone)]
pub struct LoadedFile {
    /// Stable identifier for the file; the pipeline uses it as the FileDB key.
    pub uri: String,
    /// Full file contents.
    pub text: String,
}

View File

@ -0,0 +1,8 @@
use crate::model::project_sources::ProjectSources;

/// Sources already loaded by deps (IO happens in deps, not in pipeline).
#[derive(Debug, Clone)]
pub struct LoadedSources {
    /// For each project in the stack, a list of files (uri + text).
    /// Must be index-aligned with `BuildStack.projects`; the pipeline's
    /// load_source phase checks this and diagnoses any mismatch.
    pub per_project: Vec<ProjectSources>,
}

View File

@ -0,0 +1,11 @@
// Data-model types for dependency resolution; flat re-exports live in lib.rs.
pub mod deps_config;
pub mod project_descriptor;
pub mod build_stack;
pub mod resolved_graph;
pub mod loaded_sources;
pub mod project_sources;
pub mod loaded_file;
pub mod cache_blobs;
pub mod resolved_project;
pub mod resolved_explanation;
// crate-private module; the `CachePlan` type itself is still re-exported.
pub(crate) mod cache_plan;

View File

@ -0,0 +1,8 @@
use prometeu_core::ProjectId;

/// Identity of one project in the resolved graph / build stack.
#[derive(Debug, Clone)]
pub struct ProjectDescriptor {
    /// Interned project id.
    pub project_id: ProjectId,
    /// Human-readable project name (used in diagnostics).
    pub name: String,
    /// Project version string.
    pub version: String,
}

View File

@ -0,0 +1,8 @@
use prometeu_core::ProjectId;
use crate::model::loaded_file::LoadedFile;

/// All loaded files for a single project.
#[derive(Debug, Clone)]
pub struct ProjectSources {
    /// Must match the project at the same index of the BuildStack;
    /// the pipeline verifies this alignment.
    pub project_id: ProjectId,
    /// Every loaded file (uri + text) belonging to this project.
    pub files: Vec<LoadedFile>,
}

View File

@ -0,0 +1,4 @@
/// Trace of resolution decisions, intended to be produced when
/// `DepsConfig::explain` is set (see `ResolvedProject::explain`).
/// Placeholder: fields TBD.
#[derive(Debug, Clone)]
pub struct ResolveExplanation {
}

View File

@ -0,0 +1,10 @@
use prometeu_core::ProjectId;
use crate::ProjectDescriptor;

/// Dependency graph produced by project resolution.
#[derive(Debug, Clone)]
pub struct ResolvedGraph {
    /// The root project being built.
    pub root: ProjectId,
    pub projects: Vec<ProjectDescriptor>, // arena
    // optional: adjacency list for consistency checks
    pub edges: Vec<Vec<ProjectId>>, // edges[from] = vec[to]
}

View File

@ -0,0 +1,9 @@
use crate::{BuildStack, ResolvedGraph, ResolveExplanation, CachePlan};

/// Output of `resolve_project`: everything downstream stages consume.
#[derive(Debug, Clone)]
pub struct ResolvedProject {
    /// Full dependency graph (arena + edges).
    pub graph: ResolvedGraph,
    /// Deps-first topological order over the graph's projects.
    pub stack: BuildStack,
    /// Resolution trace; populated only when an explanation was requested.
    pub explain: Option<ResolveExplanation>,
    /// Plan handed to `prepare_caches`.
    pub cache_plan: CachePlan,
}

View File

@ -0,0 +1,5 @@
use crate::{CacheBlobs, CachePlan, DepsConfig};

/// Prepare and validate cache blobs for the given plan.
///
/// Deps owns all cache IO and validity checks; the pipeline merely decides
/// when to consume the returned blobs.
///
/// TODO(hard-reset): not implemented yet.
pub fn prepare_caches(_cfg: &DepsConfig, _cache_plan: &CachePlan) -> anyhow::Result<CacheBlobs> {
    todo!("deps::prepare_caches: hydrate and validate cache blobs")
}

View File

@ -1,69 +0,0 @@
use std::collections::HashMap;
use prometeu_core::ProjectId;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct ProjectKey {
pub name: String,
pub version: String,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProjectMeta {
pub id: ProjectId,
pub name: String,
pub version: String,
}
#[derive(Debug, Default, Clone)]
pub struct ProjectRegistry {
by_name: HashMap<ProjectKey, ProjectId>,
projects: Vec<ProjectMeta>,
}
impl ProjectRegistry {
pub fn new() -> Self { Self::default() }
pub fn intern(&mut self, key: &ProjectKey) -> ProjectId {
if let Some(id) = self.by_name.get(key).copied() { return id; }
let id = ProjectId(self.projects.len() as u32);
self.by_name.insert(key.clone(), id);
self.projects.push(ProjectMeta { id, name: key.name.clone(), version: key.version.clone() });
id
}
pub fn meta(&self, id: ProjectId) -> Option<&ProjectMeta> {
self.projects.get(id.as_usize())
}
pub fn key_of(&self, id: ProjectId) -> Option<ProjectKey> {
self.meta(id).map(|m| ProjectKey { name: m.name.clone(), version: m.version.clone() })
}
pub fn len(&self) -> usize { self.projects.len() }
pub fn is_empty(&self) -> bool { self.projects.is_empty() }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn project_registry_stable_ids_for_same_key() {
let mut reg = ProjectRegistry::new();
let k = ProjectKey { name: "sdk".into(), version: "1.0.0".into() };
let id1 = reg.intern(&k);
let id2 = reg.intern(&k);
assert_eq!(id1, id2);
// Different version -> different id
let k2 = ProjectKey { name: "sdk".into(), version: "1.1.0".into() };
let id3 = reg.intern(&k2);
assert_ne!(id1, id3);
// Meta lookup
let m1 = reg.meta(id1).unwrap();
assert_eq!(m1.name, "sdk");
assert_eq!(m1.version, "1.0.0");
}
}

View File

@ -0,0 +1,8 @@
use crate::{DepsConfig, ResolvedProject};
use camino::Utf8Path;
use std::path::Path;

/// Resolve the project rooted at `path` into a dependency graph plus a
/// deps-first build stack.
///
/// # Errors
/// Fails if `path` is not valid UTF-8 — camino paths are UTF-8 only, so a
/// non-UTF-8 OS path must be rejected here instead of panicking.
///
/// TODO(hard-reset): resolution itself is not implemented yet.
pub fn resolve_project(_cfg: &DepsConfig, path: &Path) -> anyhow::Result<ResolvedProject> {
    // `Utf8Path::from_path` returns None for non-UTF-8 paths; surface that as
    // an error rather than `unwrap()`ing on user-controlled input.
    let _project_dir = Utf8Path::from_path(path).ok_or_else(|| {
        anyhow::anyhow!("project path is not valid UTF-8: {}", path.display())
    })?;
    todo!("deps::resolve_project: walk manifests and build ResolvedProject")
}