first commit

This commit is contained in:
bQUARKz 2026-02-16 07:51:59 +00:00
commit ce951d2e10
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
101 changed files with 3146 additions and 0 deletions

12
.gitattributes vendored Normal file
View File

@ -0,0 +1,12 @@
#
# https://help.github.com/articles/dealing-with-line-endings/
#
# Linux start script should use lf
/gradlew text eol=lf
# These are Windows script files and should use crlf
*.bat text eol=crlf
# Binary files should be left untouched
*.jar binary

10
.gitignore vendored Normal file
View File

@ -0,0 +1,10 @@
# Ignore Gradle project-specific cache directory
.gradle
.idea
.output.txt
# Ignore Gradle build output directory
build
# Ignore Kotlin plugin data
.kotlin

1
README.md Normal file
View File

@ -0,0 +1 @@
# Prometeu Studio

View File

@ -0,0 +1,8 @@
plugins {
`kotlin-dsl`
}
repositories {
gradlePluginPortal()
mavenCentral()
}

View File

@ -0,0 +1,9 @@
dependencyResolutionManagement {
versionCatalogs {
create("libs") {
from(files("../gradle/libs.versions.toml"))
}
}
}
rootProject.name = "buildSrc"

View File

@ -0,0 +1,4 @@
plugins {
id("gradle.java-common-conventions")
application
}

View File

@ -0,0 +1,30 @@
plugins {
java
}
repositories {
mavenCentral()
}
dependencies {
constraints {
}
compileOnly("org.projectlombok:lombok:1.18.32")
annotationProcessor("org.projectlombok:lombok:1.18.32")
testCompileOnly("org.projectlombok:lombok:1.18.32")
testAnnotationProcessor("org.projectlombok:lombok:1.18.32")
testImplementation("org.junit.jupiter:junit-jupiter:5.12.1")
testRuntimeOnly("org.junit.platform:junit-platform-launcher")
}
java {
toolchain {
languageVersion = JavaLanguageVersion.of(21)
}
}
tasks.named<Test>("test") {
useJUnitPlatform()
}

View File

@ -0,0 +1,8 @@
plugins {
id("gradle.java-library-conventions")
}
dependencies {
implementation("com.google.dagger:dagger:2.50")
annotationProcessor("com.google.dagger:dagger-compiler:2.50")
}

View File

@ -0,0 +1,4 @@
plugins {
id("gradle.java-common-conventions")
`java-library`
}

5
gradle.properties Normal file
View File

@ -0,0 +1,5 @@
# This file was generated by the Gradle 'init' task.
# https://docs.gradle.org/current/userguide/build_environment.html#sec:gradle_configuration_properties
org.gradle.configuration-cache=false

11
gradle/libs.versions.toml Normal file
View File

@ -0,0 +1,11 @@
[versions]
javafx = "23.0.2"
richtextfx = "0.11.2"
[libraries]
javafx-controls = { group = "org.openjfx", name = "javafx-controls", version.ref = "javafx" }
javafx-fxml = { group = "org.openjfx", name = "javafx-fxml", version.ref = "javafx" }
richtextfx = { group = "org.fxmisc.richtext", name = "richtextfx", version.ref = "richtextfx" }
[plugins]
javafx = { id = "org.openjfx.javafxplugin", version = "0.1.0" }

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

View File

@ -0,0 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-9.3.1-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

248
gradlew vendored Executable file
View File

@ -0,0 +1,248 @@
#!/bin/sh
#
# Copyright © 2015 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

93
gradlew.bat vendored Normal file
View File

@ -0,0 +1,93 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,9 @@
[package]
name = "prometeu-language-pbs"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
prometeu-language-api = { path = "../../prometeu-language-api" }

View File

@ -0,0 +1,16 @@
use std::sync::OnceLock;
use prometeu_language_api::{LanguageSpec, SourcePolicy};
pub static LANGUAGE_SPEC: OnceLock<LanguageSpec> = OnceLock::new();
/// Returns the process-wide `LanguageSpec` for the PBS language,
/// initializing `LANGUAGE_SPEC` on first use.
///
/// NOTE(review): this function was private and never called, so the public
/// `LANGUAGE_SPEC` OnceLock was never initialized and consumers calling
/// `LANGUAGE_SPEC.get()` observed `None` (see prometeu-languages-registry,
/// which unwraps that `get()`). It is now `pub` so callers can force
/// initialization; consider also re-exporting it from the crate root.
pub fn registry() -> &'static LanguageSpec {
    LANGUAGE_SPEC.get_or_init(|| {
        LanguageSpec {
            // Stable language identifier; used as the registry key.
            id: "pbs",
            source_policy: SourcePolicy {
                // File extensions recognized as PBS sources.
                extensions: vec!["pbs"],
                // Extension matching is case-sensitive.
                case_sensitive: true,
            },
        }
    })
}

View File

@ -0,0 +1,3 @@
mod language_spec;
pub use language_spec::LANGUAGE_SPEC;

View File

@ -0,0 +1,11 @@
[package]
name = "prometeu-languages-registry"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
prometeu-language-api = { path = "../../prometeu-language-api" }
prometeu-language-pbs = { path = "../prometeu-language-pbs" }

View File

@ -0,0 +1,20 @@
use prometeu_language_api::LanguageSpec;
use std::collections::HashMap;
use std::sync::OnceLock;
use prometeu_language_pbs::LANGUAGE_SPEC as PBS_LANGUAGE_SPEC;
static REGISTRY: OnceLock<HashMap<&'static str, LanguageSpec>> = OnceLock::new();
/// Builds (once) and returns the id -> `LanguageSpec` registry.
fn registry() -> &'static HashMap<&'static str, LanguageSpec> {
    // NOTE(review): `LANGUAGE_SPEC` in prometeu-language-pbs is a `OnceLock`
    // that nothing initializes before this call, so `get()` yields `None` and
    // this panics on first use. The pbs crate should expose an initializing
    // accessor (its `registry()` fn) for this crate to call instead.
    let pbs = PBS_LANGUAGE_SPEC
        .get()
        .expect("PBS LANGUAGE_SPEC must be initialized before the language registry is built");
    REGISTRY.get_or_init(|| {
        HashMap::from([
            (pbs.id, pbs.clone()),
        ])
    })
}
/// Looks up a language specification by its stable id (e.g. "pbs").
pub fn get_language_spec(id: &str) -> Option<&LanguageSpec> {
    registry().get(id)
}

View File

@ -0,0 +1,3 @@
mod language_spec_registry;
pub use language_spec_registry::get_language_spec;

View File

@ -0,0 +1,24 @@
[package]
name = "prometeu-build-pipeline"
version = "0.1.0"
edition = "2021"
license.workspace = true
repository.workspace = true
[[bin]]
name = "prometeu-build-pipeline"
path = "src/main.rs"
[package.metadata.dist]
dist = true
include = ["../../VERSION.txt"]
[dependencies]
prometeu-deps = { path = "../prometeu-deps" }
prometeu-core = { path = "../prometeu-core" }
prometeu-languages-registry = { path = "../languages/prometeu-languages-registry" }
clap = { version = "4.5.54", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
anyhow = "1.0.100"
camino = "1.2.2"

View File

@ -0,0 +1,158 @@
use crate::pipeline::run_phases;
use crate::{BuildMode, PipelineConfig, PipelineInput, PipelineOutput};
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use prometeu_deps::{load_sources, resolve_workspace, DepsConfig};
use std::path::{Path, PathBuf};
use camino::Utf8Path;
use crate::emit_artifacts::{emit_artifacts, EmitOptions};
/// Command line interface for the Prometeu Compiler.
#[derive(Parser)]
#[command(name = "prometeu")]
#[command(version, about = "PROMETEU toolchain entrypoint", long_about = None)]
pub struct Cli {
#[command(subcommand)]
pub command: Commands,
}
/// Available subcommands for the compiler.
#[derive(Subcommand)]
pub enum Commands {
/// Builds a Prometeu project by compiling source code into an artifact (pbc/program image).
Build {
/// Path to the project root directory.
project_dir: PathBuf,
/// Path to save the compiled artifact.
/// If omitted, deps/pipeline decide a default under target/ or dist/.
#[arg(short, long)]
out: Option<PathBuf>,
/// Whether to generate a .json symbols file for source mapping.
#[arg(long, default_value_t = true)]
emit_symbols: bool,
/// Whether to generate a .disasm file for debugging.
#[arg(long, default_value_t = true)]
emit_disasm: bool,
/// Whether to explain the dependency resolution process.
#[arg(long)]
explain_deps: bool,
/// Build mode (debug/release).
#[arg(long, default_value = "debug")]
mode: String,
},
/// Verifies if a Prometeu project is valid without emitting code.
Verify {
project_dir: PathBuf,
/// Whether to explain the dependency resolution process.
#[arg(long)]
explain_deps: bool,
},
}
pub fn run() -> Result<()> {
let cli = Cli::parse();
match cli.command {
Commands::Build {
project_dir,
out,
emit_disasm,
emit_symbols,
explain_deps,
mode,
} => {
let build_mode = parse_mode(&mode)?;
let cfg = PipelineConfig {
mode: build_mode,
enable_cache: true,
enable_frontends: false,
};
let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
.context("pipeline: failed to execute pipeline")?;
for diagnostics in &pipeline_output.diagnostics {
eprintln!("{:?}", diagnostics);
}
let emit_opts = EmitOptions {
out,
emit_symbols,
emit_disasm,
};
emit_artifacts(&emit_opts, &pipeline_output)
.context("emit: failed to write artifacts")?;
if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
anyhow::bail!("build failed due to errors");
}
}
Commands::Verify {
project_dir,
explain_deps,
} => {
let cfg = PipelineConfig {
mode: BuildMode::Test,
enable_cache: true,
enable_frontends: false,
};
let pipeline_output = run_pipeline(cfg, &project_dir, explain_deps)
.context("pipeline: failed to execute pipeline")?;
for diagnostic in &pipeline_output.diagnostics {
eprintln!("{:?}", diagnostic);
}
if pipeline_output.diagnostics.iter().any(|d| d.severity.is_error()) {
anyhow::bail!("verify failed due to errors");
}
}
}
Ok(())
}
/// Resolves the workspace at `project_dir`, loads its sources, and runs the
/// pipeline phases, returning the aggregated pipeline output.
fn run_pipeline(cfg: PipelineConfig, project_dir: &Path, explain_deps: bool) -> Result<PipelineOutput> {
    let deps_cfg = DepsConfig {
        explain: explain_deps,
        cache_dir: Default::default(),
        registry_dirs: vec![],
    };
    // The deps APIs operate on UTF-8 paths; reject non-UTF-8 paths up front.
    let utf8_project_dir = Utf8Path::from_path(project_dir)
        .with_context(|| format!("deps: failed to convert project_dir to Utf8Path: {:?}", project_dir))?;
    let resolved = resolve_workspace(&deps_cfg, utf8_project_dir)
        .with_context(|| format!("deps: failed to resolve project at {:?}", project_dir))?;
    let sources = load_sources(&deps_cfg, &resolved)
        .context("deps: failed to load sources")?;
    Ok(run_phases(
        cfg,
        PipelineInput {
            graph: resolved.graph,
            stack: resolved.stack,
            sources,
        },
    ))
}
/// Parses the `--mode` CLI value (case-insensitive) into a `BuildMode`.
/// Fails with a descriptive error for anything other than debug/release/test.
fn parse_mode(s: &str) -> Result<BuildMode> {
    let normalized = s.to_ascii_lowercase();
    if normalized == "debug" {
        return Ok(BuildMode::Debug);
    }
    if normalized == "release" {
        return Ok(BuildMode::Release);
    }
    if normalized == "test" {
        return Ok(BuildMode::Test);
    }
    anyhow::bail!("invalid --mode '{}': expected debug|release|test", normalized)
}

View File

@ -0,0 +1,23 @@
/// Build mode selected on the CLI (`--mode`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BuildMode {
    Debug,
    Release,
    Test,
}
/// Static configuration for a single pipeline run.
#[derive(Debug, Clone)]
pub struct PipelineConfig {
    /// Selected build mode (debug/release/test).
    pub mode: BuildMode,
    /// Whether caching is enabled for the run.
    pub enable_cache: bool,
    /// Whether language frontends run; off by default (Hard Reset).
    pub enable_frontends: bool,
}
impl Default for PipelineConfig {
    fn default() -> Self {
        Self {
            mode: BuildMode::Debug,
            enable_cache: true,
            enable_frontends: false, // Hard Reset default: pipeline runs with no FE.
        }
    }
}

View File

@ -0,0 +1,71 @@
use std::any::Any;
use prometeu_core::{Diagnostic, FileDB, FileId, NameInterner, ProjectId};
use prometeu_deps::BuildStack;
/// Per-project arena slot created from the BuildStack order.
/// The pipeline owns this vector and indexes it by stack position.
#[derive(Debug)]
pub struct ProjectCtx {
pub project_id: ProjectId,
/// FileIds inserted into `source_db` for this project.
pub files: Vec<FileId>,
/// Frontend output (TypedHIRBundle or similar) - intentionally opaque.
pub frontend_out: Option<Box<dyn Any>>,
/// Backend output (ProgramImage / BytecodeModule / Artifact).
/// Keep as opaque until you finalize your bytecode/image crate.
pub backend_out: Option<Box<dyn Any>>,
}
impl ProjectCtx {
    /// Creates an empty slot for `project_id`; files and frontend/backend
    /// outputs are filled in by later pipeline phases.
    pub fn new(project_id: ProjectId) -> Self {
        Self {
            project_id,
            files: Vec::new(),
            frontend_out: None,
            backend_out: None,
        }
    }
}
#[derive(Debug)]
pub struct PipelineCtx {
pub source_db: FileDB,
pub interner: NameInterner,
pub diagnostics: Vec<Diagnostic>,
pub projects: Vec<ProjectCtx>,
}
impl PipelineCtx {
    /// Creates an empty pipeline context with no projects or diagnostics.
    pub fn new() -> Self {
        Self {
            source_db: FileDB::new(),
            interner: NameInterner::new(),
            diagnostics: Vec::new(),
            projects: Vec::new(),
        }
    }
    /// Records a diagnostic produced by any phase.
    pub fn push_diagnostic(&mut self, d: Diagnostic) {
        self.diagnostics.push(d);
    }
    /// Initialize per-project contexts from the BuildStack order.
    /// Any previously held project contexts are discarded.
    pub fn init_projects_from_stack(&mut self, stack: &BuildStack) {
        // ProjectId is Copy (see prometeu-core's define_id! derives), so
        // dereference instead of clone() and build the Vec in one pass.
        self.projects = stack
            .projects
            .iter()
            .map(|&project_id| ProjectCtx::new(project_id))
            .collect();
    }
    /// Mutable access to the project context at `index_in_stack`.
    /// Panics when the index is out of range.
    pub fn project_ctx_mut(&mut self, index_in_stack: usize) -> &mut ProjectCtx {
        &mut self.projects[index_in_stack]
    }
    /// Shared access to the project context at `index_in_stack`.
    /// Panics when the index is out of range.
    pub fn project_ctx(&self, index_in_stack: usize) -> &ProjectCtx {
        &self.projects[index_in_stack]
    }
}

View File

@ -0,0 +1,17 @@
use std::path::PathBuf;
use crate::PipelineOutput;
/// Options controlling which artifacts the build emits and where.
pub struct EmitOptions {
    // Explicit output path; None lets the pipeline pick a default directory.
    pub(crate) out: Option<PathBuf>,
    // Emit a .json symbols file for source mapping.
    pub(crate) emit_symbols: bool,
    // Emit a .disasm file for debugging.
    pub(crate) emit_disasm: bool,
}
/// Writes the build artifacts described by `_opts` from the pipeline output.
/// Currently a stub: performs no I/O and always succeeds.
pub fn emit_artifacts(_opts: &EmitOptions, _outp: &PipelineOutput) -> anyhow::Result<()> {
    // Later:
    // - decide output dir (opts.out or default)
    // - write .pbc / program image
    // - write symbols.json (if exists)
    // - write disasm (if exists)
    Ok(())
}

View File

@ -0,0 +1,12 @@
pub mod cli;
pub mod config;
pub mod ctx;
pub mod pipeline;
pub mod phases;
mod emit_artifacts;
pub use config::*;
pub use ctx::*;
pub use pipeline::*;
pub use cli::run;

View File

@ -0,0 +1,7 @@
use anyhow::Result;
/// Main entry point for the Prometeu Compiler binary.
/// It delegates execution to the library's `run` function.
fn main() -> Result<()> {
prometeu_build_pipeline::run()
}

View File

@ -0,0 +1,12 @@
use crate::{
config::PipelineConfig,
ctx::PipelineCtx,
pipeline::{PipelineInput},
};
/// Boot phase: allocate one `ProjectCtx` slot per project, in BuildStack order.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    // Arena init: one ProjectCtx per project in stack order.
    ctx.init_projects_from_stack(&input.stack);
    // NOTE: no filesystem, no FE/BE assumptions here.
}

View File

@ -0,0 +1,7 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::{Artifacts, PipelineInput}};
/// Emit phase: turn backend outputs into build artifacts.
/// Currently a stub that returns empty artifacts and performs no I/O.
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) -> Artifacts {
    // Hard Reset stub:
    // - later: emit build outputs (to FS via deps if you want strict IO centralization).
    Artifacts::default()
}

View File

@ -0,0 +1,11 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Frontend (language) phase. No-op unless `cfg.enable_frontends` is set;
/// even then, no frontend is wired in yet.
pub fn run(cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {
    if !cfg.enable_frontends {
        return;
    }
    // Hard Reset:
    // - no FE wired yet.
    // - later: iterate projects in stack order and call FE plugin(s).
}

View File

@ -0,0 +1,117 @@
use prometeu_core::{Diagnostic, Severity, Span};
use prometeu_deps::LoadedSources;
use crate::{
config::PipelineConfig,
ctx::PipelineCtx,
pipeline::PipelineInput,
};
/// Load-source phase: copies `input.sources` into the pipeline's FileDB and
/// warns for every project that ends up with no source files.
pub fn run(_cfg: &PipelineConfig, input: &PipelineInput, ctx: &mut PipelineCtx) {
    load_sources(&input.sources, ctx);
    for i in 0..ctx.projects.len() {
        let is_empty = ctx.projects[i].files.is_empty();
        if is_empty {
            let project_id = &input.stack.projects[i];
            // Fall back to the raw id when the graph has no entry for this
            // project instead of panicking on unwrap(); graph/stack
            // inconsistencies are already reported by load_sources.
            let project_name = input
                .graph
                .project(project_id)
                .map(|p| p.name.clone())
                .unwrap_or_else(|| format!("{:?}", project_id));
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Warning,
                code: "PIPELINE_NO_SOURCES".into(),
                message: format!(
                    "Project '{}' has no source files loaded.",
                    project_name
                ),
                span: Span::none(),
                related: vec![],
            });
        }
    }
}
/// Copies `sources` into the pipeline FileDB, aligning each per-project
/// source set with the project at the same BuildStack index. Any mismatch
/// between the two sequences is reported as an error diagnostic instead of
/// panicking, so as many problems as possible surface in one run.
fn load_sources(sources: &LoadedSources, ctx: &mut PipelineCtx) {
    let stack_len = ctx.projects.len();
    let src_len = sources.per_project.len();
    // 1) Diagnostic if sizes don't match
    if src_len != stack_len {
        ctx.push_diagnostic(Diagnostic {
            severity: Severity::Error,
            code: "PIPELINE_SOURCES_STACK_LEN_MISMATCH".into(),
            message: format!(
                "LoadedSources.per_project len ({}) does not match BuildStack len ({}).",
                src_len, stack_len
            ),
            span: Span::none(),
            related: vec![],
        });
    }
    // 2) Process the overlapping prefix (don't panic, just keep running with diagnostics)
    let n = stack_len.min(src_len);
    for i in 0..n {
        let expected = ctx.projects[i].project_id;
        let got = sources.per_project[i].project_id;
        if got != expected {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_STACK_ORDER_MISMATCH".into(),
                message: format!(
                    "LoadedSources is not aligned with BuildStack at index {}: expected project_id {:?}, got {:?}.",
                    i, expected, got
                ),
                span: Span::none(),
                related: vec![],
            });
            // There is no fix tolerance here: if the order is wrong, it is wrong.
            // Skip this entry and keep collecting diagnostics before "crashing".
            continue;
        }
        // Register each file in the FileDB and remember its id on the project.
        for f in &sources.per_project[i].files {
            let file_id = ctx.source_db.upsert(&f.uri, &f.text);
            ctx.projects[i].files.push(file_id);
        }
    }
    // 3) If any LoadedSources entries remain beyond the stack, it is a deps bug
    if src_len > stack_len {
        for extra in &sources.per_project[stack_len..] {
            ctx.push_diagnostic(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_EXTRA_PROJECT".into(),
                message: format!(
                    "LoadedSources contains extra project_id {:?} not present in BuildStack.",
                    extra.project_id
                ),
                span: Span::none(),
                related: vec![],
            });
        }
    }
    // 4) If inputs are missing for trailing stack entries, it is another deps bug
    if stack_len > src_len {
        // Diagnostics are collected into a temporary Vec first because the
        // loop borrows `ctx.projects`, so we cannot also borrow `ctx`
        // mutably to push diagnostics while iterating.
        let mut diagnostics: Vec<Diagnostic> = Vec::new();
        for missing in &ctx.projects[src_len..] {
            diagnostics.push(Diagnostic {
                severity: Severity::Error,
                code: "PIPELINE_SOURCES_MISSING_PROJECT".into(),
                message: format!(
                    "LoadedSources missing sources for project_id {:?} present in BuildStack.",
                    missing.project_id
                ),
                span: Span::none(),
                related: vec![],
            });
        }
        for diagnostic in diagnostics {
            ctx.push_diagnostic(diagnostic);
        }
    }
}

View File

@ -0,0 +1,6 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, pipeline::PipelineInput};
/// Lowering (backend) phase: lower typed frontend output into bytecode.
/// Currently a stub that does nothing.
pub fn run(_cfg: &PipelineConfig, _input: &PipelineInput, _ctx: &mut PipelineCtx) {
    // Hard Reset stub:
    // - later: consume TypedHIRBundle(s) and lower into ProgramImage/BytecodeModule.
}

View File

@ -0,0 +1,5 @@
pub mod boot;
pub mod load_source;
pub mod language;
pub mod lowering;
pub mod emit;

View File

@ -0,0 +1,59 @@
use crate::{config::PipelineConfig, ctx::PipelineCtx, phases};
use prometeu_core::Diagnostic;
use prometeu_deps::{BuildStack, LoadedSources, ResolvedGraph};
#[derive(Debug, Clone)]
pub struct PipelineInput {
pub graph: ResolvedGraph,
pub stack: BuildStack,
pub sources: LoadedSources
}
#[derive(Debug, Default, Clone)]
pub struct PipelineStats {
pub projects_count: usize,
pub files_count: usize,
}
#[derive(Debug, Default, Clone)]
pub struct Artifacts {
// placeholder: later include produced ProgramImage(s), debug bundles, logs, etc.
}
#[derive(Debug, Default)]
pub struct PipelineOutput {
pub diagnostics: Vec<Diagnostic>,
pub artifacts: Artifacts,
pub stats: PipelineStats,
}
pub(crate) fn run_phases(cfg: PipelineConfig, input: PipelineInput) -> PipelineOutput {
let mut ctx = PipelineCtx::new();
// Boot: create project slots in arena order.
phases::boot::run(&cfg, &input, &mut ctx);
// Load source: populate FileDB from LoadedSources.
phases::load_source::run(&cfg, &input, &mut ctx);
// Frontend phase (stub / optional).
phases::language::run(&cfg, &input, &mut ctx);
// Backend phase (stub).
phases::lowering::run(&cfg, &input, &mut ctx);
// Emit phase (stub).
let artifacts = phases::emit::run(&cfg, &input, &mut ctx);
// Stats (basic).
let mut stats = PipelineStats::default();
stats.projects_count = ctx.projects.len();
stats.files_count = ctx.projects.iter().map(|p| p.files.len()).sum();
PipelineOutput {
diagnostics: ctx.diagnostics,
artifacts,
stats,
}
}

View File

@ -0,0 +1,10 @@
[package]
name = "prometeu-core"
version = "0.1.0"
edition = "2024"
license.workspace = true
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
prometeu-bytecode = { path = "../prometeu-bytecode" }

View File

@ -0,0 +1,3 @@
mod source;
pub use source::*;

View File

@ -0,0 +1,81 @@
use serde::{Serialize, Serializer};
use crate::Span;
#[derive(Debug, Clone, PartialEq)]
pub enum Severity {
Error,
Warning,
}
impl Severity {
pub fn is_error(&self) -> bool {
match self {
Severity::Error => true,
Severity::Warning => false,
}
}
}
impl Serialize for Severity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Severity::Error => serializer.serialize_str("error"),
Severity::Warning => serializer.serialize_str("warning"),
}
}
}
#[derive(Debug, Clone, Serialize)]
pub struct Diagnostic {
pub severity: Severity,
pub code: String,
pub message: String,
pub span: Span,
pub related: Vec<(String, Span)>,
}
#[derive(Debug, Clone, Serialize)]
pub struct DiagnosticBundle {
pub diagnostics: Vec<Diagnostic>,
}
impl DiagnosticBundle {
    /// Creates an empty bundle.
    pub fn new() -> Self {
        Self { diagnostics: Vec::new() }
    }
    /// Appends a diagnostic to the bundle.
    pub fn push(&mut self, diagnostic: Diagnostic) {
        self.diagnostics.push(diagnostic);
    }
    /// Convenience constructor: a bundle holding exactly one error diagnostic
    /// with no related spans.
    pub fn error(code: &str, message: String, span: Span) -> Self {
        Diagnostic {
            severity: Severity::Error,
            code: code.to_string(),
            message,
            span,
            related: Vec::new(),
        }
        .into()
    }
    /// True when at least one contained diagnostic has error severity.
    pub fn has_errors(&self) -> bool {
        self.diagnostics.iter().any(|d| d.severity.is_error())
    }
}
impl From<Diagnostic> for DiagnosticBundle {
    /// Wraps a single diagnostic into a one-element bundle.
    fn from(diagnostic: Diagnostic) -> Self {
        Self { diagnostics: vec![diagnostic] }
    }
}

View File

@ -0,0 +1,69 @@
use std::collections::HashMap;
use crate::FileId;
use crate::LineIndex;
/// In-memory database of source files, addressed either by `FileId`
/// (a dense index) or by URI string.
#[derive(Default, Debug)]
pub struct FileDB {
    // Indexed by FileId.0; ids stay stable for the lifetime of the DB.
    files: Vec<FileData>,
    // Reverse lookup: file URI -> FileId.
    uri_to_id: HashMap<String, FileId>,
}
/// Per-file payload: URI, full text, and a precomputed line index.
#[derive(Debug)]
struct FileData {
    uri: String,
    text: String,
    line_index: LineIndex,
}
impl FileDB {
    /// Creates an empty file database.
    pub fn new() -> Self {
        Self {
            files: Vec::new(),
            uri_to_id: HashMap::new(),
        }
    }
    /// Inserts a new file or replaces the content of an existing one
    /// (matched by `uri`), returning its stable `FileId`.
    ///
    /// The `FileData` (including the line index) is built once up front so
    /// the insert and update branches share the same construction code;
    /// this also drops the needless `&text` double-borrow of the original.
    pub fn upsert(&mut self, uri: &str, text: &str) -> FileId {
        let data = FileData {
            uri: uri.to_owned(),
            text: text.to_owned(),
            line_index: LineIndex::new(text),
        };
        if let Some(&id) = self.uri_to_id.get(uri) {
            // Existing file: replace in place; the id stays stable.
            self.files[id.0 as usize] = data;
            id
        } else {
            // New file: the next dense index becomes its id.
            let id = FileId(self.files.len() as u32);
            self.files.push(data);
            self.uri_to_id.insert(uri.to_owned(), id);
            id
        }
    }
    /// Looks up the id of a file by URI, if registered.
    pub fn file_id(&self, uri: &str) -> Option<FileId> {
        self.uri_to_id.get(uri).copied()
    }
    /// Returns the URI for `id`. Panics on an unknown id.
    pub fn uri(&self, id: FileId) -> &str {
        &self.files[id.0 as usize].uri
    }
    /// Returns the full text for `id`. Panics on an unknown id.
    pub fn text(&self, id: FileId) -> &str {
        &self.files[id.0 as usize].text
    }
    /// Returns the line index for `id`. Panics on an unknown id.
    pub fn line_index(&self, id: FileId) -> &LineIndex {
        &self.files[id.0 as usize].line_index
    }
    /// Returns a list of all known file IDs in insertion order.
    pub fn all_files(&self) -> Vec<FileId> {
        (0..self.files.len()).map(|i| FileId(i as u32)).collect()
    }
}

View File

@ -0,0 +1,60 @@
// Generates a 4-byte newtype id over `u32` with a NONE sentinel (u32::MAX),
// plus `From` conversions in both directions. All ids are Copy/Ord/Hash and
// serde-serializable so they can serve as arena indices and map keys.
macro_rules! define_id {
    ($name:ident) => {
        #[repr(transparent)]
        #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, serde::Serialize, serde::Deserialize)]
        pub struct $name(pub u32);

        impl $name {
            // Sentinel meaning "no id"; u32::MAX is never a valid arena index.
            pub const NONE: $name = $name(u32::MAX);

            #[inline]
            pub const fn as_u32(self) -> u32 { self.0 }

            // True when this id is the NONE sentinel.
            #[inline]
            pub fn is_none(self) -> bool {
                self == $name::NONE
            }
        }

        impl From<u32> for $name {
            #[inline]
            fn from(value: u32) -> Self { Self(value) }
        }

        impl From<$name> for u32 {
            #[inline]
            fn from(value: $name) -> Self { value.0 }
        }
    };
}

// One id type per arena/domain used across the compiler.
define_id!(FileId);
define_id!(NodeId);
define_id!(NameId);
define_id!(SymbolId);
define_id!(TypeId);
define_id!(ModuleId);
define_id!(ProjectId);
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;
    use std::mem::size_of;

    // Guards the layout contract: every id must stay exactly 4 bytes
    // (repr(transparent) over u32) and remain usable as a hash-map key.
    #[test]
    fn ids_are_repr_transparent_and_hashable() {
        assert_eq!(size_of::<FileId>(), 4);
        assert_eq!(size_of::<NodeId>(), 4);
        assert_eq!(size_of::<NameId>(), 4);
        assert_eq!(size_of::<SymbolId>(), 4);
        assert_eq!(size_of::<TypeId>(), 4);
        assert_eq!(size_of::<ModuleId>(), 4);
        assert_eq!(size_of::<ProjectId>(), 4);
        // Hash/Eq usage
        let mut m: HashMap<SymbolId, &str> = HashMap::new();
        m.insert(SymbolId(1), "one");
        assert_eq!(m.get(&SymbolId(1)).copied(), Some("one"));
    }
}

View File

@ -0,0 +1,41 @@
/// Maps byte offsets to zero-based `(line, column)` pairs and back for one
/// text buffer. Columns are byte-based, not char-based.
#[derive(Debug)]
pub struct LineIndex {
    // Byte offset of the first byte of each line; always starts with 0.
    line_starts: Vec<u32>,
    // Total length of the indexed text, in bytes.
    total_len: u32,
}

impl LineIndex {
    /// Builds the index by recording the byte offset following every `'\n'`.
    pub fn new(text: &str) -> Self {
        let mut line_starts = vec![0u32];
        line_starts.extend(
            text.char_indices()
                .filter(|&(_, c)| c == '\n')
                .map(|(offset, _)| (offset + 1) as u32),
        );
        Self {
            line_starts,
            total_len: text.len() as u32,
        }
    }

    /// Converts a byte `offset` into a zero-based `(line, column)` pair.
    pub fn offset_to_line_col(&self, offset: u32) -> (u32, u32) {
        // Count of line starts <= offset; the containing line is one less.
        // (line_starts[0] == 0, so the count is always >= 1.)
        let line = self.line_starts.partition_point(|&start| start <= offset) - 1;
        let col = offset - self.line_starts[line];
        (line as u32, col)
    }

    /// Converts `(line, col)` back to a byte offset; `None` when the position
    /// is out of range. The end-of-file position on the last line is valid.
    pub fn line_col_to_offset(&self, line: u32, col: u32) -> Option<u32> {
        let start = *self.line_starts.get(line as usize)?;
        let offset = start + col;
        let next_start = match self.line_starts.get(line as usize + 1) {
            Some(&s) => s,
            None => self.total_len,
        };
        // Inside the line, or exactly at EOF on the final line.
        if offset < next_start || (offset == next_start && offset == self.total_len) {
            return Some(offset);
        }
        None
    }
}

View File

@ -0,0 +1,13 @@
mod ids;
mod span;
mod file_db;
mod name_interner;
mod diagnostics;
mod line_index;
pub use ids::*;
pub use span::Span;
pub use file_db::FileDB;
pub use line_index::LineIndex;
pub use name_interner::NameInterner;
pub use diagnostics::*;

View File

@ -0,0 +1,56 @@
use std::collections::HashMap;
use crate::NameId;
/// Interns strings to compact `NameId`s, with O(1) lookup in both directions.
#[derive(Debug, Default, Clone)]
pub struct NameInterner {
    // Arena of interned strings; NameId.0 indexes into it.
    names: Vec<String>,
    // Reverse map: string -> id.
    ids: HashMap<String, NameId>,
}

impl NameInterner {
    /// Creates an empty interner.
    pub fn new() -> Self {
        Self {
            names: Vec::new(),
            ids: HashMap::new(),
        }
    }

    /// Returns the id of `s`, allocating a fresh one on first sight.
    pub fn intern(&mut self, s: &str) -> NameId {
        match self.ids.get(s) {
            Some(&id) => id,
            None => {
                let id = NameId(self.names.len() as u32);
                self.names.push(s.to_owned());
                self.ids.insert(s.to_owned(), id);
                id
            }
        }
    }

    /// Looks up the id of `s` without interning it.
    pub fn get(&self, s: &str) -> Option<NameId> {
        self.ids.get(s).copied()
    }

    /// Resolves an id back to its string. Panics on an unknown id.
    pub fn resolve(&self, id: NameId) -> &str {
        self.names[id.0 as usize].as_str()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // intern -> resolve must return the original string.
    #[test]
    fn interner_intern_resolve_roundtrip() {
        let mut interner = NameInterner::new();
        let id = interner.intern("foo");
        assert_eq!(interner.resolve(id), "foo");
    }

    // Interning the same string twice must yield the same id.
    #[test]
    fn interner_dedups_strings() {
        let mut interner = NameInterner::new();
        let id1 = interner.intern("bar");
        let id2 = interner.intern("bar");
        assert_eq!(id1, id2);
    }
}

View File

@ -0,0 +1,39 @@
use crate::FileId;
/// A byte range `[start, end)` inside a file; `end` is exclusive.
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct Span {
    pub file: FileId,
    pub start: u32, // byte offset
    pub end: u32,   // byte offset, exclusive
}

impl Span {
    /// Builds a span over `[start, end)` in `file`.
    #[inline]
    pub fn new(file: FileId, start: u32, end: u32) -> Self {
        Self { file, start, end }
    }

    /// The sentinel "no location" span (file is `FileId::NONE`).
    #[inline]
    pub fn none() -> Self {
        Self::new(FileId::NONE, 0, 0)
    }

    /// True when this is the sentinel produced by `Span::none()`.
    #[inline]
    pub fn is_none(&self) -> bool {
        self.file.is_none()
    }

    /// Length in bytes; saturates to 0 when `end < start`.
    #[inline]
    pub fn len(&self) -> u32 {
        self.end.saturating_sub(self.start)
    }

    /// True when `byte` falls inside `[start, end)`.
    #[inline]
    pub fn contains(&self, byte: u32) -> bool {
        (self.start..self.end).contains(&byte)
    }
}

View File

@ -0,0 +1,69 @@
use prometeu_core::{FileDB, LineIndex};
// Every char offset must survive offset -> (line, col) -> offset.
#[test]
fn test_line_index_roundtrip() {
    let text = "line 1\nline 2\nline 3";
    let index = LineIndex::new(text);
    for (offset, _) in text.char_indices() {
        let offset = offset as u32;
        let (line, col) = index.offset_to_line_col(offset);
        let recovered_offset = index
            .line_col_to_offset(line, col)
            .expect("Should recover offset");
        assert_eq!(offset, recovered_offset, "Offset mismatch at line {}, col {}", line, col);
    }
}
// Pins exact conversions at every boundary of "a\nbc\n": line starts, the
// newline characters themselves, the EOF position, and out-of-range inputs.
#[test]
fn test_line_index_boundaries() {
    let text = "a\nbc\n";
    let index = LineIndex::new(text);
    // "a" -> (0, 0)
    assert_eq!(index.offset_to_line_col(0), (0, 0));
    assert_eq!(index.line_col_to_offset(0, 0), Some(0));
    // "\n" -> (0, 1)
    assert_eq!(index.offset_to_line_col(1), (0, 1));
    assert_eq!(index.line_col_to_offset(0, 1), Some(1));
    // "b" -> (1, 0)
    assert_eq!(index.offset_to_line_col(2), (1, 0));
    assert_eq!(index.line_col_to_offset(1, 0), Some(2));
    // "c" -> (1, 1)
    assert_eq!(index.offset_to_line_col(3), (1, 1));
    assert_eq!(index.line_col_to_offset(1, 1), Some(3));
    // "\n" (second) -> (1, 2)
    assert_eq!(index.offset_to_line_col(4), (1, 2));
    assert_eq!(index.line_col_to_offset(1, 2), Some(4));
    // EOF (after last \n) -> (2, 0)
    assert_eq!(index.offset_to_line_col(5), (2, 0));
    assert_eq!(index.line_col_to_offset(2, 0), Some(5));
    // Out of bounds
    assert_eq!(index.line_col_to_offset(2, 1), None);
    assert_eq!(index.line_col_to_offset(3, 0), None);
}
// Exercises FileDB: insert, lookups by uri/id, line index access, and
// in-place update keeping the same FileId.
#[test]
fn test_file_db_upsert_and_access() {
    let mut db = FileDB::new();
    let uri = "file:///test.txt";
    let text = "hello\nworld".to_string();
    // `FileDB::upsert` takes `&str`; pass borrows of the owned Strings.
    // (The previous `text.clone()` passed a `String` by value, which does
    // not coerce to `&str` and failed to compile.)
    let id = db.upsert(uri, &text);
    assert_eq!(db.file_id(uri), Some(id));
    assert_eq!(db.uri(id), uri);
    assert_eq!(db.text(id), &text);
    let index = db.line_index(id);
    assert_eq!(index.offset_to_line_col(6), (1, 0)); // 'w' is at offset 6
    // Update existing file: same uri must keep the same id and swap the text.
    let new_text = "new content".to_string();
    let same_id = db.upsert(uri, &new_text);
    assert_eq!(id, same_id);
    assert_eq!(db.text(id), &new_text);
}

View File

@ -0,0 +1,14 @@
use prometeu_core::{FileId, Span};
#[test]
fn span_end_is_exclusive() {
let file = FileId(1);
let s = Span::new(file, 2, 5);
// len = end - start
assert_eq!(s.len(), 3);
// contains is [start, end)
assert!(s.contains(2));
assert!(s.contains(3));
assert!(s.contains(4));
assert!(!s.contains(5));
}

View File

@ -0,0 +1,19 @@
[package]
name = "prometeu-deps"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = ""
[dependencies]
serde = { version = "1.0.228", features = ["derive"] }
prometeu-core = { path = "../prometeu-core" }
prometeu-language-api = { path = "../prometeu-language-api" }
prometeu-languages-registry = { path = "../languages/prometeu-languages-registry" }
anyhow = "1.0.101"
camino = "1.2.2"
walkdir = "2.5.0"
serde_json = "1.0.149"
[features]
default = []

View File

@ -0,0 +1,19 @@
mod model;
mod load_sources;
mod workspace;
pub use workspace::resolve_workspace;
pub use load_sources::load_sources;
pub use model::manifest::*;
pub use model::resolved_project::ResolvedWorkspace;
pub use model::deps_config::DepsConfig;
pub use model::project_descriptor::ProjectDescriptor;
pub use model::build_stack::BuildStack;
pub use model::resolved_graph::ResolvedGraph;
pub use model::loaded_sources::LoadedSources;
pub use model::project_sources::ProjectSources;
pub use model::loaded_file::LoadedFile;
pub use model::cache_blobs::CacheBlobs;
pub use model::cache_plan::CachePlan;

View File

@ -0,0 +1,97 @@
use anyhow::{Context, Result};
use camino::Utf8PathBuf;
use walkdir::WalkDir;
use crate::{
DepsConfig,
LoadedFile,
LoadedSources,
ProjectSources,
ResolvedWorkspace,
};
/// Loads every source file for each project in the resolved build stack.
///
/// IO happens here in the deps layer, not in the pipeline: each project's
/// source roots are walked recursively and matching files are read into
/// memory. Files are selected via the project's `SourcePolicy` (the extension
/// list its language spec declares, validated non-empty by the policy phase)
/// instead of a hard-coded "pbs" extension, so each project's configured
/// language decides which files are picked up. Per-project file lists are
/// sorted by uri for deterministic builds.
pub fn load_sources(cfg: &DepsConfig, resolved: &ResolvedWorkspace) -> Result<LoadedSources> {
    let mut per_project = Vec::with_capacity(resolved.stack.projects.len());
    for project_id in &resolved.stack.projects {
        let project = resolved
            .graph
            .project(project_id)
            .with_context(|| format!("deps: unknown project_id {:?} in build stack", project_id))?;
        if cfg.explain {
            eprintln!(
                "[deps] load_sources: project {}@{} ({:?})",
                project.name, project.version, project.project_dir
            );
        }
        let mut files: Vec<LoadedFile> = Vec::new();
        for root in &project.source_roots {
            let abs_root = project.project_dir.join(root);
            if cfg.explain {
                eprintln!("[deps] scanning {:?}", abs_root);
            }
            // A declared-but-missing source root is a hard configuration error.
            if !abs_root.exists() {
                anyhow::bail!(
                    "deps: source root does not exist for project {}@{}: {:?}",
                    project.name,
                    project.version,
                    abs_root
                );
            }
            // Walk recursively; unreadable entries are silently skipped.
            for entry in WalkDir::new(&abs_root)
                .follow_links(false)
                .into_iter()
                .filter_map(|e| e.ok())
            {
                let ft = entry.file_type();
                if !ft.is_file() {
                    continue;
                }
                let path = entry.path();
                // Filter by the language's source policy instead of a
                // hard-coded extension (previously only "pbs" was accepted).
                let accepted = path
                    .extension()
                    .and_then(|s| s.to_str())
                    .map(|ext| project.source_policy.matches_ext(ext))
                    .unwrap_or(false);
                if !accepted {
                    continue;
                }
                // Convert to Utf8Path (the best effort) and use a stable "uri".
                let path_utf8: Utf8PathBuf = match Utf8PathBuf::from_path_buf(path.to_path_buf()) {
                    Ok(p) => p,
                    Err(_) => {
                        anyhow::bail!("deps: non-utf8 path found while scanning sources: {:?}", path);
                    }
                };
                let text = std::fs::read_to_string(&path_utf8)
                    .with_context(|| format!("deps: failed to read source file {:?}", path_utf8))?;
                // TODO: normalize newlines
                files.push(LoadedFile {
                    uri: path_utf8.to_string(),
                    text,
                });
            }
        }
        // Determinism: sort the file list by uri (important for stable builds).
        files.sort_by(|a, b| a.uri.cmp(&b.uri));
        per_project.push(ProjectSources {
            project_id: project_id.clone(),
            files,
        });
    }
    Ok(LoadedSources { per_project })
}

View File

@ -0,0 +1,6 @@
use prometeu_core::ProjectId;
/// Ordered list of projects to build, produced by the `stack` phase.
#[derive(Debug, Clone)]
pub struct BuildStack {
    // Topologically ordered project ids (see the stack phase for ordering).
    pub projects: Vec<ProjectId>,
}

View File

@ -0,0 +1,7 @@
/// Cache blobs computed/validated by deps.
/// The pipeline may decide when to store, but deps executes IO and cache validity.
#[derive(Debug, Clone)]
pub struct CacheBlobs {
    // placeholder — no cache format exists yet; field keeps the struct non-unit
    // so adding real fields later is not a breaking layout surprise.
    pub _unused: (),
}

View File

@ -0,0 +1,4 @@
/// Placeholder for the future caching plan (what to store/invalidate);
/// currently empty.
#[derive(Debug, Clone)]
pub struct CachePlan {
}

View File

@ -0,0 +1,7 @@
use camino::Utf8PathBuf;
/// Configuration for a deps run.
pub struct DepsConfig {
    // When true, phases print "[deps]..." explain/trace lines to stderr.
    pub explain: bool,
    // Directory for cached artifacts.
    pub cache_dir: Utf8PathBuf,
    pub registry_dirs: Vec<Utf8PathBuf>, // or sources ?  (semantics TBD — confirm)
}

View File

@ -0,0 +1,5 @@
/// One source file loaded into memory: a stable uri (utf-8 path string) plus
/// its full text.
#[derive(Debug, Clone)]
pub struct LoadedFile {
    pub uri: String,
    pub text: String,
}

View File

@ -0,0 +1,8 @@
use crate::model::project_sources::ProjectSources;
/// Sources already loaded by deps (IO happens in deps, not in pipeline).
#[derive(Debug, Clone)]
pub struct LoadedSources {
    /// For each project in the stack, a list of files (uri + text).
    pub per_project: Vec<ProjectSources>,
}

View File

@ -0,0 +1,75 @@
use camino::Utf8PathBuf;
use serde::{Deserialize, Serialize};
/// On-disk project manifest (`prometeu.json`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Manifest {
    pub name: String,
    pub version: String,
    // Relative source roots; empty when omitted in JSON (rejected later by
    // the materialize phase).
    #[serde(default)]
    pub source_roots: Vec<String>,
    // Language frontend this project is written for (registry id).
    pub language: LanguageDecl,
    #[serde(default)]
    pub deps: Vec<DepDecl>,
}

/// Declares the language a project is written in, by registry id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LanguageDecl {
    pub id: String,
}

/// A dependency declaration from the manifest.
///
/// `untagged`: serde tries the variants in order, so a JSON object with a
/// `path` key parses as `Local`, and one with a `git` key as `Git`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DepDecl {
    // { "path": "../sibling" } — dependency on a local directory.
    Local {
        path: String,
    },
    // { "git": "<url>", "rev": "<sha>" } — git dependency; rev is required by
    // the resolver even though the field is optional at parse time.
    Git {
        git: String,
        rev: Option<String>,
    },
}

/// Parsed `prometeu.lock`: maps remote dependencies to local directories.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PrometeuLock {
    pub schema: u32,
    #[serde(default)]
    pub mappings: Vec<LockMapping>,
}
impl PrometeuLock {
    /// An empty lock (schema 0, no mappings) used when no lock file exists.
    pub fn blank() -> Self {
        Self {
            schema: 0,
            mappings: Vec::new(),
        }
    }

    /// Finds the local directory a git dependency (`url` + `rev`) is pinned
    /// to, if the lock contains a matching mapping.
    pub fn lookup_git_local_dir(&self, url: &str, rev: &str) -> Option<&String> {
        for mapping in &self.mappings {
            if let LockMapping::Git { git, rev: pinned, local_dir } = mapping {
                if git == url && pinned == rev {
                    return Some(local_dir);
                }
            }
        }
        None
    }
}
/// One lock-file mapping entry; JSON is tagged by a lowercase `kind` field.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "lowercase")]
pub enum LockMapping {
    // { "kind": "git", ... } — a git url+rev fetched into `local_dir`.
    Git {
        git: String,
        rev: String,
        local_dir: String,
    },
    // { "kind": "registry", ... } — a registry package unpacked into `local_dir`.
    Registry {
        registry: String,
        version: String,
        local_dir: String,
    },
}

View File

@ -0,0 +1,11 @@
pub mod deps_config;
pub mod project_descriptor;
pub mod build_stack;
pub mod resolved_graph;
pub mod loaded_sources;
pub mod project_sources;
pub mod loaded_file;
pub mod cache_blobs;
pub mod resolved_project;
pub mod cache_plan;
pub mod manifest;

View File

@ -0,0 +1,14 @@
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_language_api::SourcePolicy;
/// Fully resolved, immutable description of one project in the workspace,
/// as exposed to consumers via `ResolvedGraph`.
#[derive(Debug, Clone)]
pub struct ProjectDescriptor {
    pub project_id: ProjectId,
    pub name: String,
    pub version: String,
    // Canonicalized project root directory.
    pub project_dir: Utf8PathBuf,
    // Source roots relative to `project_dir`.
    pub source_roots: Vec<Utf8PathBuf>,
    pub language_id: String,
    // Extension policy from the language spec; drives source file selection.
    pub source_policy: SourcePolicy,
}

View File

@ -0,0 +1,8 @@
use prometeu_core::ProjectId;
use crate::model::loaded_file::LoadedFile;
/// The loaded source files of a single project (see `LoadedSources`).
#[derive(Debug, Clone)]
pub struct ProjectSources {
    pub project_id: ProjectId,
    // Sorted by uri for deterministic builds.
    pub files: Vec<LoadedFile>,
}

View File

@ -0,0 +1,16 @@
use prometeu_core::ProjectId;
use crate::ProjectDescriptor;
/// The resolved project dependency graph.
#[derive(Debug, Clone)]
pub struct ResolvedGraph {
    pub root: ProjectId,
    // Arena layout: index == ProjectId.0 (materialize assigns ids in push order).
    pub projects: Vec<ProjectDescriptor>, // arena
    // Optional: adjacency list for graph checks.
    pub edges: Vec<Vec<ProjectId>>, // edges[from] = vec[to]
}

impl ResolvedGraph {
    /// Looks up a project descriptor by id; `None` when out of range.
    pub fn project(&self, id: &ProjectId) -> Option<&ProjectDescriptor> {
        self.projects.get(id.0 as usize)
    }
}

View File

@ -0,0 +1,9 @@
use prometeu_core::ProjectId;
use crate::{BuildStack, ResolvedGraph};
/// Output of workspace resolution: the root project, the full dependency
/// graph, and the ordered build stack derived from it.
#[derive(Debug, Clone)]
pub struct ResolvedWorkspace {
    // Id of the workspace root project (same as `graph.root`).
    pub project_id: ProjectId,
    pub graph: ResolvedGraph,
    pub stack: BuildStack,
}

View File

@ -0,0 +1,32 @@
use anyhow::{Context, Result};
use camino::{Utf8Path, Utf8PathBuf};
use crate::workspace::model::DepRef;
/// IO abstraction for the workspace resolver, so discovery can be driven by a
/// fake host in tests instead of touching the real filesystem.
pub trait DepsHost {
    /// Reads a file to a String, with a contextual error on failure.
    fn read_to_string(&self, path: &Utf8Path) -> Result<String>;
    // fn ensure_project_local(&self, from_dir: &Utf8Path, dep: &DepRef) -> Result<Utf8PathBuf>;
}

/// The production host: plain `std::fs` access.
pub struct FsHost;

impl DepsHost for FsHost {
    fn read_to_string(&self, path: &Utf8Path) -> Result<String> {
        std::fs::read_to_string(path)
            .with_context(|| format!("failed to read {:?}", path))
    }
    // Kept for a future phase that localizes deps through the host:
    // fn ensure_project_local(&self, from_dir: &Utf8Path, dep: &DepRef) -> Result<Utf8PathBuf> {
    //     match dep {
    //         DepRef::Local { path } => {
    //             let joined = from_dir.join(path);
    //             let canon = joined.canonicalize()
    //                 .with_context(|| format!("deps: dep path does not exist: {:?}", joined))?;
    //             Utf8PathBuf::from_path_buf(canon)
    //                 .map_err(|p| anyhow::anyhow!("deps: non-utf8 dep dir: {:?}", p))
    //         }
    //         _ => unimplemented!(),
    //     }
    // }
}

View File

@ -0,0 +1,6 @@
mod resolve_workspace;
mod host;
mod model;
mod phases;
pub use resolve_workspace::resolve_workspace;

View File

@ -0,0 +1,31 @@
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_language_api::SourcePolicy;
use crate::Manifest;
/// Output of the discover phase: a project directory plus its parsed manifest,
/// before any ProjectId is assigned.
#[derive(Debug, Clone)]
pub struct RawProjectNode {
    // Canonicalized project directory.
    pub dir: Utf8PathBuf,
    pub manifest_path: Utf8PathBuf,
    pub manifest: Manifest,
}

/// A dependency reference after manifest decoding. In v0 every dep — including
/// git deps pinned via prometeu.lock — resolves to a local directory.
#[derive(Debug, Clone)]
pub enum DepRef {
    Local {
        // Canonicalized dependency directory.
        path: Utf8PathBuf
    },
}

/// A materialized project node in the resolver arena (id == index).
#[derive(Debug, Clone)]
pub struct ProjectNode {
    pub id: ProjectId,
    pub dir: Utf8PathBuf,
    pub name: String,
    pub version: String,
    pub source_roots: Vec<Utf8PathBuf>,
    pub language_id: String,
    pub deps: Vec<DepRef>,
    // Extension policy resolved from the language registry.
    pub source_policy: SourcePolicy,
}

View File

@ -0,0 +1,131 @@
use crate::model::manifest::DepDecl;
use crate::workspace::host::DepsHost;
use crate::workspace::model::RawProjectNode;
use crate::workspace::phases::state::ResolverState;
use crate::Manifest;
use anyhow::{anyhow, bail, Context, Result};
use camino::Utf8PathBuf;
use serde_json;
use std::fs::canonicalize;
/// Phase 1: Discover all projects in the workspace.
///
/// - Reads `prometeu.json` from each pending project directory.
/// - Parses `Manifest`.
/// - Registers the raw node.
/// - Enqueues local-path deps for discovery (v0).
///
/// Does NOT:
/// - assign ProjectId
/// - build edges
/// - validate versions
pub fn discover(
    cfg: &crate::DepsConfig,
    host: &dyn DepsHost,
    state: &mut ResolverState,
) -> Result<()> {
    // BFS over project directories; `state.pending` was seeded with the root.
    while let Some(canon_dir) = state.pending.pop_front() {
        // de-dup by directory (dirs are canonicalized before being enqueued)
        if state.raw_by_dir.contains_key(&canon_dir) {
            continue;
        }
        let manifest_path = canon_dir.join("prometeu.json");
        if !manifest_path.exists() || !manifest_path.is_file() {
            bail!(
                "deps: manifest not found: expected a file {:?} (project dir {:?})",
                manifest_path,
                canon_dir
            );
        }
        if cfg.explain {
            eprintln!("[deps][discover] reading {:?}", manifest_path);
        }
        // Manifest IO goes through the host so tests can fake the filesystem.
        let text = host
            .read_to_string(&manifest_path)
            .with_context(|| format!("deps: failed to read manifest {:?}", manifest_path))?;
        let manifest: Manifest = serde_json::from_str(&text)
            .with_context(|| format!("deps: invalid manifest JSON {:?}", manifest_path))?;
        // Register raw node
        let raw_idx = state.raw.len();
        state.raw.push(RawProjectNode {
            dir: canon_dir.clone(),
            manifest_path: manifest_path.clone(),
            manifest: manifest.clone(),
        });
        state.raw_by_dir.insert(canon_dir.clone(), raw_idx);
        // Enqueue this project's dependencies for discovery.
        for dep in &manifest.deps {
            match dep {
                DepDecl::Local { path } => {
                    // Local deps resolve relative to this project's dir and are
                    // canonicalized so the directory de-dup stays reliable.
                    let dep_dir = canon_dir.join(path);
                    let dep_dir_std = dep_dir.canonicalize().with_context(|| {
                        format!(
                            "deps: dep path does not exist: {:?} (from {:?})",
                            dep_dir, canon_dir
                        )
                    })?;
                    let dep_dir_canon = Utf8PathBuf::from_path_buf(dep_dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 dep dir: {:?}", p))?;
                    if cfg.explain {
                        eprintln!("[deps][discover] local dep '{}' -> {:?}", path, dep_dir_canon);
                    }
                    state.pending.push_back(dep_dir_canon);
                }
                DepDecl::Git { git, rev } => {
                    // v0: git deps must be pinned to a rev and already fetched
                    // locally via a prometeu.lock mapping.
                    let Some(rev) = rev.as_deref() else {
                        bail!(
                            "deps: git dependency '{}' requires an explicit 'rev' (commit hash) for now",
                            git
                        );
                    };
                    let Some(local_dir) = state.lock.lookup_git_local_dir(git, rev) else {
                        bail!(
                            "deps: git dependency requires prometeu.lock mapping, but entry not found: git='{}' rev='{}'",
                            git,
                            rev
                        );
                    };
                    // canonicalize the lock-provided local dir to keep identity stable
                    let local_dir_std = canonicalize(local_dir)
                        .with_context(|| format!("deps: prometeu.lock local_dir does not exist: {:?}", local_dir))?;
                    let local_dir_canon = Utf8PathBuf::from_path_buf(local_dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 lock local_dir: {:?}", p))?;
                    // validate manifest exists at the mapped project root
                    // (this check should not belong here, but it is ok)
                    let mapped_manifest = local_dir_canon.join("prometeu.json");
                    if !mapped_manifest.exists() || !mapped_manifest.is_file() {
                        bail!(
                            "deps: prometeu.lock maps git dep to {:?}, but manifest is missing: {:?}",
                            local_dir_canon,
                            mapped_manifest
                        );
                    }
                    if cfg.explain {
                        eprintln!(
                            "[deps][discover] git dep '{}' rev '{}' -> {:?}",
                            git, rev, local_dir_canon
                        );
                    }
                    state.pending.push_back(local_dir_canon);
                }
            }
        }
    }
    Ok(())
}

View File

@ -0,0 +1,62 @@
use anyhow::{Context, Result};
use prometeu_core::ProjectId;
use crate::workspace::model::DepRef;
use crate::workspace::phases::state::ResolverState;
/// Phase 3: Localize dependencies and build graph edges.
///
/// For each project node, every `DepRef` is mapped to the `ProjectId` of the
/// already-discovered project at that directory, and recorded as an edge
/// `from -> dep` in `state.edges`.
///
/// v0 policy:
/// - Only DepRef::Local exists; an undiscovered dep dir is a hard error.
pub fn localize(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    // Start from a clean slate so the phase can be re-run deterministically.
    state.edges.iter_mut().for_each(|e| e.clear());
    for idx in 0..state.nodes.len() {
        let from_id: ProjectId = state.nodes[idx].id;
        let from_dir = state.nodes[idx].dir.clone();
        if cfg.explain {
            eprintln!(
                "[deps][localize] from id={:?} dir={:?}",
                from_id, from_dir
            );
        }
        // Work on a clone of the dep list to sidestep borrow conflicts
        // while mutating state.edges below.
        let dep_refs = state.nodes[idx].deps.clone();
        for dep_ref in dep_refs {
            match &dep_ref {
                DepRef::Local {
                    path
                } => {
                    // The dep dir must already be a discovered project.
                    let dep_id = state.by_dir.get(path).copied().with_context(|| {
                        format!(
                            "deps: localized dep dir {:?} was not discovered; \
                            ensure the dep has a prometeu.json and is reachable via local paths",
                            path
                        )
                    })?;
                    state.edges[from_id.0 as usize].push(dep_id);
                }
            }
        }
        // Keep adjacency lists deterministic and duplicate-free.
        let out = &mut state.edges[from_id.0 as usize];
        out.sort_by_key(|id| id.0);
        out.dedup();
    }
    Ok(())
}

View File

@ -0,0 +1,144 @@
use crate::model::manifest::DepDecl;
use crate::workspace::model::{DepRef, ProjectNode};
use crate::workspace::phases::state::ResolverState;
use anyhow::{anyhow, bail, Context, Result};
use camino::Utf8PathBuf;
use prometeu_core::ProjectId;
use prometeu_languages_registry::get_language_spec;
use std::fs::canonicalize;
/// Phase 2: Materialize projects (allocate ProjectId / arena nodes).
///
/// Inputs:
/// - st.raw (RawProjectNode: dir + manifest)
///
/// Outputs:
/// - st.nodes (ProjectNode arena)
/// - st.by_dir (dir -> ProjectId)
/// - st.edges (allocated adjacency lists, empty for now)
/// - st.root (ProjectId for root_dir)
///
/// Does NOT:
/// - resolve deps to local dirs (that's phase localize)
/// - validate version conflicts/cycles
/// - resolve language/source policy
pub fn materialize(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    // Reset materialized state (allows rerun in future refactors/tests)
    state.nodes.clear();
    state.by_dir.clear();
    state.edges.clear();
    state.root = None;
    state.nodes.reserve(state.raw.len());
    state.edges.reserve(state.raw.len());
    // Arena layout: ProjectId == index into state.raw / state.nodes.
    for (idx, raw) in state.raw.iter().enumerate() {
        let id = ProjectId(idx as u32);
        // Default source roots if omitted
        let source_roots: Vec<Utf8PathBuf> = raw
            .manifest
            .source_roots
            .iter()
            .map(|root| Utf8PathBuf::from(root))
            .collect();
        if source_roots.is_empty() {
            bail!(
                "deps: no source roots specified for project {}",
                raw.manifest.name
            )
        }
        // Convert DepDecl -> DepRef (no localization yet)
        let mut deps: Vec<DepRef> = Vec::with_capacity(raw.manifest.deps.len());
        for d in &raw.manifest.deps {
            match d {
                DepDecl::Local { path } => {
                    // Canonicalize so the dir matches the keys used in by_dir.
                    let joined = raw.dir.join(path);
                    let dir_std = joined.canonicalize()
                        .with_context(|| format!("deps: local dep path does not exist: {:?} (from {:?})", joined, raw.dir))?;
                    let dir_canon = Utf8PathBuf::from_path_buf(dir_std)
                        .map_err(|p| anyhow!("deps: non-utf8 dep dir: {:?}", p))?;
                    deps.push(DepRef::Local {
                        path: dir_canon
                    });
                }
                DepDecl::Git { git, rev } => {
                    // Git deps must be pinned and mapped by prometeu.lock
                    // (same rules enforced in the discover phase).
                    let Some(rev) = rev.as_deref() else {
                        bail!(
                            "deps: git dependency '{}' requires an explicit 'rev' (commit hash) for now",
                            git
                        );
                    };
                    let Some(local_dir) = state.lock.lookup_git_local_dir(git, rev) else {
                        bail!(
                            "deps: git dependency requires prometeu.lock mapping, but entry not found: git='{}' rev='{}'",
                            git,
                            rev
                        );
                    };
                    // canonicalize the lock-provided local dir to keep identity stable
                    let path = canonicalize(local_dir).with_context(|| {
                        format!(
                            "deps: prometeu.lock local_dir does not exist: {:?}",
                            local_dir
                        )
                    })?;
                    let local_dir_canon = Utf8PathBuf::from_path_buf(path)
                        .map_err(|p| anyhow!("deps: non-utf8 lock local_dir: {:?}", p))?;
                    deps.push(DepRef::Local {
                        path: local_dir_canon,
                    });
                }
            }
        }
        if cfg.explain {
            eprintln!(
                "[deps][materialize] id={:?} {}@{} dir={:?} language={}",
                id, raw.manifest.name, raw.manifest.version, raw.dir, raw.manifest.language.id
            );
        }
        // Source policy comes from the language registry; unknown ids fail hard.
        let source_policy = get_language_spec(raw.manifest.language.id.as_str())
            .map(|spec| spec.source_policy.clone())
            .ok_or(anyhow!(
                "deps: unknown language spec: {}",
                raw.manifest.language.id
            ))?;
        // Record node
        state.nodes.push(ProjectNode {
            id,
            dir: raw.dir.clone(),
            name: raw.manifest.name.clone(),
            version: raw.manifest.version.clone(),
            source_roots,
            language_id: raw.manifest.language.id.clone(),
            deps,
            source_policy,
        });
        state.by_dir.insert(raw.dir.clone(), id);
        state.edges.push(Vec::new());
    }
    // Determine root id
    if let Some(root_id) = state.by_dir.get(&state.root_dir).copied() {
        state.root = Some(root_id);
    } else {
        // This should never happen if seed/discover worked.
        // Keep it as a hard failure (in a later validate phase you can convert to a nicer diagnostic).
        anyhow::bail!(
            "deps: root project dir {:?} was not discovered/materialized",
            state.root_dir
        );
    }
    Ok(())
}

View File

@ -0,0 +1,10 @@
mod run_all;
mod state;
mod discover;
mod materialize;
mod localize;
mod validate;
mod policy;
mod stack;
pub use run_all::run_all;

View File

@ -0,0 +1,17 @@
use anyhow::{bail, Result};
use crate::workspace::phases::state::ResolverState;
/// Phase: sanity-check language policies on every materialized project.
///
/// Each project must declare at least one source extension, otherwise the
/// source loader could never match any of its files.
pub fn policy(_cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<()> {
    for node in state.nodes.iter() {
        if !node.source_policy.extensions.is_empty() {
            continue;
        }
        bail!(
            "deps: project {}@{} has empty source_policy.extensions (language={})",
            node.name,
            node.version,
            node.language_id
        );
    }
    Ok(())
}

View File

@ -0,0 +1,50 @@
use anyhow::{Context, Result};
use camino::Utf8Path;
use crate::{BuildStack, DepsConfig, ProjectDescriptor, ResolvedGraph, ResolvedWorkspace};
use crate::workspace::host::FsHost;
use crate::workspace::phases::{discover, localize, materialize, policy, stack, state, validate};
/// Runs every resolver phase in order and assembles the final
/// `ResolvedWorkspace` (graph + build stack) from the resolver state.
pub fn run_all(cfg: &DepsConfig, fs_host: &FsHost, root_dir: &Utf8Path) -> Result<ResolvedWorkspace> {
    // Phases share one mutable state; each builds on the previous one.
    let mut st = state::seed(cfg, root_dir)?;
    discover::discover(cfg, fs_host, &mut st)?;
    materialize::materialize(cfg, &mut st)?;
    localize::localize(cfg, &mut st)?;
    validate::validate(cfg, &st)?;
    policy::policy(cfg, &mut st)?;
    let build_stack: BuildStack = stack::stack(cfg, &mut st)?;
    let root = st
        .root
        .context("deps: internal error: root ProjectId not set")?;
    // materialize assigns ProjectId(idx) in push order, so mapping the node
    // arena positionally yields the layout ResolvedGraph expects
    // (index == ProjectId.0).
    let projects: Vec<ProjectDescriptor> = st
        .nodes
        .iter()
        .map(|n| ProjectDescriptor {
            project_id: n.id,
            name: n.name.clone(),
            version: n.version.clone(),
            project_dir: n.dir.clone(),
            source_roots: n.source_roots.clone(),
            language_id: n.language_id.clone(),
            source_policy: n.source_policy.clone(),
        })
        .collect();
    let graph = ResolvedGraph {
        root,
        projects,
        edges: st.edges,
    };
    Ok(ResolvedWorkspace {
        project_id: root,
        graph,
        stack: build_stack,
    })
}

View File

@ -0,0 +1,97 @@
use anyhow::{Context, Result};
use prometeu_core::ProjectId;
use std::collections::VecDeque;
use crate::BuildStack;
use crate::workspace::phases::state::ResolverState;
/// Phase: BuildStack (deps-first topo order).
///
/// Output:
/// - state.stack: Vec<ProjectId> where deps appear before dependents.
///
/// Determinism:
/// - ties are resolved by ProjectId order (stable across runs if discovery is stable).
pub fn stack(cfg: &crate::DepsConfig, state: &mut ResolverState) -> Result<BuildStack> {
    let n = state.nodes.len();
    let _root = state.root.context("deps: internal error: root ProjectId not set")?;
    // edges[from] lists the DEPENDENCIES of `from` (from -> dep). Running
    // Kahn directly on these edges yields a dependents-first order (the root
    // would come out first), contradicting the documented contract above.
    // For a deps-first order we topologically sort the REVERSED graph: a
    // node becomes ready only once all of its dependencies were emitted.
    //
    // rev[dep] lists the dependents of `dep`; indeg[from] counts its deps.
    let mut rev: Vec<Vec<usize>> = vec![Vec::new(); n];
    let mut indeg = vec![0usize; n];
    for (from, outs) in state.edges.iter().enumerate() {
        for &to in outs {
            rev[to.0 as usize].push(from);
            indeg[from] += 1;
        }
    }
    // Deterministic queue: seed with dep-free nodes in ProjectId order.
    let mut q = VecDeque::new();
    for i in 0..n {
        if indeg[i] == 0 {
            q.push_back(i);
        }
    }
    let mut order: Vec<ProjectId> = Vec::with_capacity(n);
    while let Some(i) = q.pop_front() {
        order.push(ProjectId(i as u32));
        // rev[i] was filled in ascending `from` order, so traversal is stable.
        for &dependent in &rev[i] {
            indeg[dependent] -= 1;
            if indeg[dependent] == 0 {
                // Deterministic insert: keep queue ordered by ProjectId.
                insert_sorted_by_id(&mut q, dependent);
            }
        }
    }
    // If validate ran, this should already be cycle-free; still keep a guard.
    if order.len() != n {
        anyhow::bail!(
            "deps: internal error: stack generation did not visit all nodes ({} of {})",
            order.len(),
            n
        );
    }
    if cfg.explain {
        eprintln!("[deps][stack] build order:");
        for id in &order {
            let node = &state.nodes[id.0 as usize];
            eprintln!(" - {:?} {}@{} dir={:?}", id, node.name, node.version, node.dir);
        }
    }
    Ok(BuildStack {
        projects: order,
    })
}
/// Insert node index `i` into queue `q`, keeping it sorted ascending by
/// ProjectId (i.e. by node index).
///
/// Uses `partition_point` for an O(log n) search of the insertion slot and
/// `VecDeque::insert` to place the element. This replaces the previous
/// linear scan plus full drain-and-rebuild of the queue (the old comment
/// claimed `VecDeque` has no `insert`, but it has had one since Rust 1.5).
fn insert_sorted_by_id(q: &mut VecDeque<usize>, i: usize) {
    // Number of elements strictly less than `i` == first valid slot for it.
    let pos = q.partition_point(|&v| v < i);
    q.insert(pos, i);
}

View File

@ -0,0 +1,58 @@
use camino::{Utf8Path, Utf8PathBuf};
use std::collections::{HashMap, VecDeque};
use anyhow::Context;
use crate::workspace::model::{RawProjectNode, ProjectNode};
use prometeu_core::ProjectId;
use crate::PrometeuLock;
use serde_json;
/// Mutable state shared by all resolver phases (seed -> discover ->
/// materialize -> localize -> validate -> policy -> stack).
pub struct ResolverState {
    // Canonicalized workspace root directory.
    pub root_dir: Utf8PathBuf,
    // phase1 output
    pub raw: Vec<RawProjectNode>,
    // dir -> index into `raw`, used to de-dup discovery.
    pub raw_by_dir: HashMap<Utf8PathBuf, usize>,
    // BFS queue of project dirs still to discover.
    pub pending: VecDeque<Utf8PathBuf>,
    // phase2+
    pub nodes: Vec<ProjectNode>,
    pub by_dir: HashMap<Utf8PathBuf, ProjectId>,
    // edges[from] = dependency ids of `from` (filled by localize).
    pub edges: Vec<Vec<ProjectId>>,
    pub root: Option<ProjectId>,
    // Parsed prometeu.lock (blank when no lock file exists).
    pub lock: PrometeuLock,
}
/// Builds the initial resolver state for a workspace rooted at `root_dir`:
/// canonicalizes the root, loads `prometeu.lock` when present (blank
/// otherwise), and queues the root directory for discovery.
pub fn seed(_cfg: &crate::DepsConfig, root_dir: &Utf8Path) -> anyhow::Result<ResolverState> {
    let canon_std = root_dir.canonicalize()?;
    let root_dir_canon = Utf8PathBuf::from_path_buf(canon_std)
        .map_err(|p| anyhow::anyhow!("deps: non-utf8 root dir: {:?}", p))?;
    // The lock file is optional: a missing file simply means "no mappings".
    let lock_path = root_dir_canon.join("prometeu.lock");
    let lock = if !lock_path.exists() {
        PrometeuLock::blank()
    } else {
        let txt = std::fs::read_to_string(&lock_path)?;
        serde_json::from_str::<PrometeuLock>(&txt)
            .with_context(|| format!("invalid prometeu.lock at {:?}", lock_path))?
    };
    // Discovery starts from the canonical root itself.
    let mut pending = VecDeque::new();
    pending.push_back(root_dir_canon.clone());
    Ok(ResolverState {
        root_dir: root_dir_canon,
        raw: Vec::new(),
        raw_by_dir: HashMap::new(),
        pending,
        nodes: Vec::new(),
        by_dir: HashMap::new(),
        edges: Vec::new(),
        root: None,
        lock,
    })
}

View File

@ -0,0 +1,108 @@
use anyhow::{bail, Context, Result};
use prometeu_core::ProjectId;
use std::collections::{HashMap, VecDeque};
use crate::workspace::phases::state::ResolverState;
/// Phase: Validate workspace graph & invariants (v0).
///
/// Checks:
/// - root present
/// - edges are in-range
/// - no cycles
/// - no version conflicts for same project name
pub fn validate(cfg: &crate::DepsConfig, state: &ResolverState) -> Result<()> {
    // 1) root present
    let root = state.root.context("deps: internal error: root ProjectId not set")?;
    if cfg.explain {
        eprintln!("[deps][validate] root={:?}", root);
    }
    // 2) edges sanity: every edge target must be a valid arena index
    let n = state.nodes.len();
    for (from_idx, outs) in state.edges.iter().enumerate() {
        for &to in outs {
            let to_idx = to.0 as usize;
            if to_idx >= n {
                bail!(
                    "deps: invalid edge: from {:?} -> {:?} (to out of range; nodes={})",
                    ProjectId(from_idx as u32),
                    to,
                    n
                );
            }
        }
    }
    // 3) version conflicts by name
    // name -> (version -> ProjectId)
    let mut by_name: HashMap<&str, HashMap<&str, ProjectId>> = HashMap::new();
    for node in &state.nodes {
        let vmap = by_name.entry(node.name.as_str()).or_default();
        // First project wins per (name, version); only multiplicity matters.
        vmap.entry(node.version.as_str()).or_insert(node.id);
    }
    for (name, versions) in &by_name {
        if versions.len() > 1 {
            // create deterministic message (sorted by version string)
            let mut vs: Vec<(&str, ProjectId)> = versions.iter().map(|(v, id)| (*v, *id)).collect();
            vs.sort_by(|a, b| a.0.cmp(b.0));
            let mut msg = format!("deps: version conflict for project '{}':", name);
            for (v, id) in vs {
                let dir = &state.nodes[id.0 as usize].dir;
                msg.push_str(&format!("\n - {} at {:?} (id={:?})", v, dir, id));
            }
            bail!(msg);
        }
    }
    // 4) cycle detection (Kahn + leftover nodes)
    // Build indegree
    let mut indeg = vec![0usize; n];
    for outs in &state.edges {
        for &to in outs {
            indeg[to.0 as usize] += 1;
        }
    }
    let mut q = VecDeque::new();
    for i in 0..n {
        if indeg[i] == 0 {
            q.push_back(i);
        }
    }
    let mut visited = 0usize;
    while let Some(i) = q.pop_front() {
        visited += 1;
        for &to in &state.edges[i] {
            let j = to.0 as usize;
            indeg[j] -= 1;
            if indeg[j] == 0 {
                q.push_back(j);
            }
        }
    }
    if visited != n {
        // Nodes with indeg>0 are part of cycles (or downstream of them)
        let mut cyclic: Vec<ProjectId> = Vec::new();
        for i in 0..n {
            if indeg[i] > 0 {
                cyclic.push(ProjectId(i as u32));
            }
        }
        // Deterministic error output
        cyclic.sort_by_key(|id| id.0);
        let mut msg = "deps: dependency cycle detected among:".to_string();
        for id in cyclic {
            let node = &state.nodes[id.0 as usize];
            msg.push_str(&format!("\n - {:?} {}@{} dir={:?}", id, node.name, node.version, node.dir));
        }
        bail!(msg);
    }
    Ok(())
}

View File

@ -0,0 +1,10 @@
use anyhow::Result;
use camino::Utf8Path;
use crate::{DepsConfig, ResolvedWorkspace};
use crate::workspace::host::FsHost;
/// Resolves the workspace rooted at `root_dir` by running every resolver
/// phase against the real filesystem host.
pub fn resolve_workspace(cfg: &DepsConfig, root_dir: &Utf8Path) -> Result<ResolvedWorkspace> {
    // FsHost is stateless, so borrow it directly at the call site.
    crate::workspace::phases::run_all(cfg, &FsHost, root_dir)
}

View File

@ -0,0 +1,10 @@
[package]
name = "prometeu-language-api"
version = "0.1.0"
edition = "2021"
license = "MIT"
description = "Canonical language contract for Prometeu Backend: identifiers, references, and strict Frontend trait."
repository = "https://github.com/prometeu/runtime"
[dependencies]

View File

@ -0,0 +1,21 @@
/// Policy describing how a language's source files are recognized by
/// file extension.
#[derive(Debug, Clone)]
pub struct SourcePolicy {
    /// Recognized extensions (without a leading dot).
    pub extensions: Vec<&'static str>,
    /// Whether extension comparison honours case.
    pub case_sensitive: bool,
}

impl SourcePolicy {
    /// Returns true when `ext` matches any configured extension,
    /// respecting the `case_sensitive` flag.
    pub fn matches_ext(&self, ext: &str) -> bool {
        self.extensions.iter().copied().any(|known| {
            if self.case_sensitive {
                known == ext
            } else {
                known.eq_ignore_ascii_case(ext)
            }
        })
    }
}
/// Static description of a supported language: a stable identifier plus the
/// policy used to recognize its source files.
#[derive(Debug, Clone)]
pub struct LanguageSpec {
    /// Stable language identifier.
    pub id: &'static str,
    /// How source files for this language are matched by extension.
    pub source_policy: SourcePolicy,
}

View File

@ -0,0 +1,3 @@
mod language_spec;
pub use language_spec::*;

View File

@ -0,0 +1,19 @@
[package]
name = "prometeu-lowering"
version = "0.1.0"
edition = "2021"
license.workspace = true
repository.workspace = true
[dependencies]
prometeu-bytecode = { path = "../prometeu-bytecode" }
prometeu-core = { path = "../prometeu-core" }
prometeu-language-api = { path = "../prometeu-language-api" }
clap = { version = "4.5.54", features = ["derive"] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
anyhow = "1.0.100"
pathdiff = "0.2.1"
[dev-dependencies]
tempfile = "3.10.1"

View File

@ -0,0 +1,7 @@
plugins {
id("gradle.java-library-conventions")
}
dependencies {
api(project(":prometeu-infra"))
}

View File

@ -0,0 +1,6 @@
plugins {
id("gradle.java-library-conventions")
}
dependencies {
}

View File

@ -0,0 +1,20 @@
plugins {
id("gradle.java-application-conventions")
alias(libs.plugins.javafx)
}
dependencies {
implementation(project(":prometeu-infra"))
implementation(libs.javafx.controls)
implementation(libs.javafx.fxml)
implementation(libs.richtextfx)
}
javafx {
version = libs.versions.javafx.get()
modules("javafx.controls", "javafx.fxml")
}
application {
mainClass = "p.studio.App"
}

View File

@ -0,0 +1,31 @@
package p.studio;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.stage.Stage;
import p.studio.utilities.i18n.I18n;
import p.studio.window.MainView;
/**
 * JavaFX entry point for Prometeu Studio: wires the container, builds the
 * main view, and binds the window title to the active locale.
 */
public class App extends Application {

    /** JavaFX init hook (runs before {@code start}); warms the service container. */
    @Override
    public void init() throws Exception {
        super.init();
        Container.init();
    }

    @Override
    public void start(Stage stage) {
        var root = new MainView();
        var scene = new Scene(root, 1200, 800);
        scene.getStylesheets().add(Container.theme().getDefaultTheme());
        // Binding (not a plain set) so the title updates on locale change.
        stage.titleProperty().bind(Container.i18n().bind(I18n.APP_TITLE));
        stage.setScene(scene);
        stage.show();
    }

    public static void main(String[] args) {
        // Forward CLI args so Application.getParameters() can see them
        // (previously launch() was called with no arguments, dropping args).
        launch(args);
    }
}

View File

@ -0,0 +1,21 @@
package p.studio;
import p.studio.utilities.ThemeService;
import p.studio.utilities.i18n.I18nService;
/**
 * Minimal static service container for the studio's singletons.
 * Services are created once in the static initializer; {@link #init()} exists
 * only to force class loading at a deterministic point during startup.
 */
public class Container {

    private static final I18nService i18nService;
    private static final ThemeService themeService;

    static {
        i18nService = new I18nService();
        themeService = new ThemeService();
    }

    /** No-op body: calling it triggers class initialization (the static block above). */
    public static void init() {
    }

    public static I18nService i18n() { return i18nService; }

    // BUG FIX: previously returned `new ThemeService()` on every call,
    // bypassing the singleton created in the static initializer and making
    // the static `themeService` field dead weight.
    public static ThemeService theme() { return themeService; }
}

View File

@ -0,0 +1,12 @@
package p.studio.utilities;
import java.util.Objects;
/** Resolves application CSS themes bundled on the classpath. */
public class ThemeService {

    private static final String THEME_PATH = "/themes/";
    private static final String DEFAULT_THEME = THEME_PATH + "default-prometeu.css";

    /** @return external-form URL of the bundled default theme stylesheet. */
    public String getDefaultTheme() {
        var themeUrl = getClass().getResource(DEFAULT_THEME);
        // Fail fast if the stylesheet is missing from the build.
        return Objects.requireNonNull(themeUrl).toExternalForm();
    }
}

View File

@ -0,0 +1,33 @@
package p.studio.utilities.i18n;
import lombok.Getter;
/**
 * Message catalogue: each constant carries the resource-bundle key used by
 * the i18n service to look up its localized text.
 */
public enum I18n {
    APP_TITLE("app.title"),
    MENU_FILE("menu.file"),
    MENU_FILE_NEWPROJECT("menu.file.newProject"),
    MENU_FILE_OPEN("menu.file.open"),
    MENU_FILE_SAVE("menu.file.save"),
    MENU_EDIT("menu.edit"),
    MENU_VIEW("menu.view"),
    MENU_HELP("menu.help"),
    TOOLBAR_PLAY("toolbar.play"),
    TOOLBAR_STOP("toolbar.stop"),
    TOOLBAR_EXPORT("toolbar.export"),
    WORKSPACE_CODE("workspace.code"),
    WORKSPACE_ASSETS("workspace.assets"),
    WORKSPACE_DEBUG("workspace.debug"),
    ;

    // Resource-bundle lookup key backing this entry.
    private final String key;

    I18n(String key) {
        this.key = key;
    }

    /** @return the resource-bundle lookup key for this message. */
    public String getKey() {
        return key;
    }
}

View File

@ -0,0 +1,44 @@
package p.studio.utilities.i18n;
import javafx.beans.binding.Bindings;
import javafx.beans.binding.StringBinding;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import java.text.MessageFormat;
import java.util.Locale;
import java.util.ResourceBundle;
/**
 * Locale-aware message lookup backed by the {@code i18n.messages} resource
 * bundles, with JavaFX bindings that re-evaluate when the locale changes.
 */
public final class I18nService {

    // Observable current locale; every binding created below depends on it.
    private final ObjectProperty<Locale> locale = new SimpleObjectProperty<>(Locale.ENGLISH);

    /** @return the mutable locale property; bindings re-fire when it changes. */
    public ObjectProperty<Locale> localeProperty() { return locale; }
    public Locale getLocale() { return locale.get(); }
    public void setLocale(Locale l) { locale.set(l); }

    // Looks the bundle up on every call; ResourceBundle keeps its own cache.
    private ResourceBundle bundle(Locale l) {
        return ResourceBundle.getBundle("i18n.messages", l);
    }

    /** Plain (non-observable) lookup of the message for the current locale. */
    public String text(I18n i18n) {
        return bundle(getLocale()).getString(i18n.getKey());
    }

    /** MessageFormat-style lookup, formatted with the current locale. */
    public String format(I18n i18n, Object... args) {
        return new MessageFormat(text(i18n), getLocale()).format(args);
    }

    /** @return a binding on the message text that updates when the locale changes. */
    public StringBinding bind(I18n key) {
        return Bindings.createStringBinding(
                () -> text(key),
                locale
        );
    }

    /** Same as {@link #bind(I18n)} with MessageFormat arguments (args are fixed at call time). */
    public StringBinding bind(I18n key, Object... args) {
        return Bindings.createStringBinding(
                () -> format(key, args),
                locale
        );
    }
}

View File

@ -0,0 +1,29 @@
package p.studio.window;
import javafx.scene.layout.BorderPane;
import p.studio.workspaces.PlaceholderWorkspace;
import p.studio.workspaces.WorkspaceHost;
import p.studio.workspaces.WorkspaceId;
import p.studio.workspaces.editor.EditorWorkspace;
/**
 * Top-level layout: menu bar on top, workspace rail on the left, and the
 * active workspace in the center.
 */
public final class MainView extends BorderPane {

    // Instance field (was static final): a static host shared across MainView
    // instances would accumulate duplicate workspace registrations and re-add
    // the same scene-graph nodes if the view were ever constructed twice.
    private final WorkspaceHost host = new WorkspaceHost();

    public MainView() {
        var menubar = new MenuBar();
        setTop(menubar);

        host.register(new EditorWorkspace());
        host.register(new PlaceholderWorkspace(WorkspaceId.ASSETS, "Assets"));
        host.register(new PlaceholderWorkspace(WorkspaceId.BUILD, "Build"));
        host.register(new PlaceholderWorkspace(WorkspaceId.DEVICE, "Device"));

        var bar = new WorkspaceBar(host::show);
        setLeft(bar);
        setCenter(host);

        // Default selection on startup.
        bar.select(WorkspaceId.EDITOR);
        host.show(WorkspaceId.EDITOR);
    }
}

View File

@ -0,0 +1,36 @@
package p.studio.window;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import p.studio.Container;
import p.studio.utilities.i18n.I18n;
/** Application menu bar; every caption is bound to the active locale. */
public final class MenuBar extends javafx.scene.control.MenuBar {

    public MenuBar() {
        Menu file = boundMenu(I18n.MENU_FILE);
        file.getItems().addAll(
                boundItem(I18n.MENU_FILE_NEWPROJECT),
                boundItem(I18n.MENU_FILE_OPEN),
                boundItem(I18n.MENU_FILE_SAVE));

        getMenus().addAll(
                file,
                boundMenu(I18n.MENU_EDIT),
                boundMenu(I18n.MENU_VIEW),
                boundMenu(I18n.MENU_HELP));
    }

    /** Creates a menu whose caption tracks the current locale. */
    private static Menu boundMenu(I18n key) {
        Menu menu = new Menu();
        menu.textProperty().bind(Container.i18n().bind(key));
        return menu;
    }

    /** Creates a menu item whose caption tracks the current locale. */
    private static MenuItem boundItem(I18n key) {
        MenuItem item = new MenuItem();
        item.textProperty().bind(Container.i18n().bind(key));
        return item;
    }
}

View File

@ -0,0 +1,47 @@
package p.studio.window;
import javafx.geometry.Insets;
import javafx.scene.control.ToggleButton;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.VBox;
import p.studio.workspaces.WorkspaceId;

import java.util.EnumMap;
import java.util.Map;
import java.util.function.Consumer;
/** Vertical rail of toggle buttons used to switch the active workspace. */
public final class WorkspaceBar extends VBox {

    // Single group so at most one workspace button is selected at a time.
    private final ToggleGroup group = new ToggleGroup();
    private final Map<WorkspaceId, ToggleButton> buttons = new EnumMap<>(WorkspaceId.class);

    /** @param onSelect callback invoked with the workspace id when a button is pressed */
    public WorkspaceBar(Consumer<WorkspaceId> onSelect) {
        setPadding(new Insets(8));
        setSpacing(8);
        setPrefWidth(56);
        addBtn(WorkspaceId.EDITOR, "📝", "Editor", onSelect);
        addBtn(WorkspaceId.ASSETS, "📦", "Assets", onSelect);
        addBtn(WorkspaceId.BUILD, "⚙️", "Build", onSelect);
        addBtn(WorkspaceId.DEVICE, "🎮", "Device", onSelect);
    }

    private void addBtn(WorkspaceId id, String icon, String tooltip, Consumer<WorkspaceId> onSelect) {
        ToggleButton b = new ToggleButton(icon);
        b.setToggleGroup(group);
        b.setFocusTraversable(false);
        b.setPrefSize(40, 40);
        b.setMinSize(40, 40);
        b.setMaxSize(40, 40);
        b.setUserData(id);
        // BUG FIX: the tooltip argument was accepted but never applied.
        b.setTooltip(new Tooltip(tooltip));
        b.setOnAction(e -> onSelect.accept(id));
        b.setStyle("-fx-font-size: 16px;");
        buttons.put(id, b);
        getChildren().add(b);
    }

    /** Visually marks the button for {@code id} as selected (no callback fired). */
    public void select(WorkspaceId id) {
        ToggleButton b = buttons.get(id);
        if (b != null) b.setSelected(true);
    }
}

View File

@ -0,0 +1,20 @@
package p.studio.workspaces;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.layout.StackPane;
import p.studio.utilities.i18n.I18n;
/** Stub workspace that renders a "(TODO)" label until the real view exists. */
public final class PlaceholderWorkspace implements Workspace {

    private final WorkspaceId id;
    private final StackPane root = new StackPane();

    /**
     * @param id    workspace slot this placeholder occupies
     * @param label plain (non-localized) text shown in the center
     */
    public PlaceholderWorkspace(WorkspaceId id, String label) {
        this.id = id;
        root.getChildren().add(new Label(label + " (TODO)"));
    }

    @Override public WorkspaceId id() { return id; }
    // NOTE(review): hardcoded — every placeholder (ASSETS, BUILD, DEVICE)
    // reports the "Assets" title key regardless of `id`; confirm the intended
    // I18n key per workspace once real titles exist.
    @Override public I18n title() { return I18n.WORKSPACE_ASSETS; }
    @Override public Node root() { return root; }
}

View File

@ -0,0 +1,12 @@
package p.studio.workspaces;
import p.studio.utilities.i18n.I18n;
/** A pluggable top-level view that can be registered with a workspace host. */
public interface Workspace {
    /** Stable identifier used for registration and switching. */
    WorkspaceId id();
    /** i18n key for the workspace's display title. */
    I18n title();
    /** Root node attached to the host's scene graph. */
    javafx.scene.Node root();
    /** Lifecycle hook invoked when this workspace becomes the visible one. */
    default void onShow() {}
    /** Lifecycle hook invoked before this workspace is hidden. */
    default void onHide() {}
}

View File

@ -0,0 +1,45 @@
package p.studio.workspaces;
import javafx.scene.Node;
import javafx.scene.layout.StackPane;
import java.util.EnumMap;
import java.util.Map;
/**
 * Stack of registered workspaces; exactly one is visible at a time.
 * Workspaces stay attached to the scene graph and are toggled via
 * visible/managed flags rather than being re-parented.
 */
public final class WorkspaceHost extends StackPane {

    private final Map<WorkspaceId, Workspace> workspaces = new EnumMap<>(WorkspaceId.class);
    private WorkspaceId active;

    /** Adds a workspace; its root is attached hidden until {@link #show} selects it. */
    public void register(Workspace ws) {
        workspaces.put(ws.id(), ws);
        Node r = ws.root();
        r.setVisible(false);
        r.setManaged(false);
        getChildren().add(r);
    }

    /** Switches the visible workspace; no-op when {@code id} is already active. */
    public void show(WorkspaceId id) {
        if (active == id) return;
        // BUG FIX: resolve the target BEFORE hiding the current workspace, so
        // an unregistered id fails fast without leaving the host blank with a
        // stale `active` value.
        Workspace next = workspaces.get(id);
        if (next == null) throw new IllegalStateException("Workspace not registered: " + id);
        if (active != null) {
            Workspace old = workspaces.get(active);
            old.onHide();
            Node prev = old.root();
            prev.setVisible(false);
            prev.setManaged(false);
        }
        Node n = next.root();
        n.setVisible(true);
        n.setManaged(true);
        next.onShow();
        active = id;
    }

    /** @return the currently visible workspace id, or null before the first show(). */
    public WorkspaceId active() { return active; }
}

View File

@ -0,0 +1,8 @@
package p.studio.workspaces;
/** Identifiers for the top-level workspaces hosted by the studio window. */
public enum WorkspaceId {
    EDITOR,
    ASSETS,
    BUILD,
    DEVICE
}

View File

@ -0,0 +1,40 @@
package p.studio.workspaces.editor;
import javafx.geometry.Insets;
import javafx.scene.control.Button;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Region;
/** Horizontal toolbar for the editor workspace: file actions plus a run button. */
public final class EditorToolbar extends HBox {

    public EditorToolbar() {
        setPadding(new Insets(6, 10, 6, 10));
        setSpacing(8);
        getStyleClass().add("main-toolbar");

        Button newBtn = iconButton("📄", "New");
        Button openBtn = iconButton("📂", "Open");
        Button saveBtn = iconButton("💾", "Save");
        // TODO(review): the run icon is an empty string — likely a glyph lost
        // in encoding; confirm the intended icon (e.g. "▶").
        Button runBtn = iconButton("", "Run");
        runBtn.getStyleClass().add("accent");

        // Spacer pushes the run button to the right edge.
        Region spacer = new Region();
        HBox.setHgrow(spacer, javafx.scene.layout.Priority.ALWAYS);

        getChildren().addAll(
                newBtn,
                openBtn,
                saveBtn,
                spacer,
                runBtn
        );
    }

    /** Creates a styled icon button with a hover tooltip. */
    private Button iconButton(String icon, String tooltip) {
        Button b = new Button(icon);
        b.setFocusTraversable(false);
        b.getStyleClass().add("toolbar-button");
        // BUG FIX: the tooltip argument was accepted but never applied.
        b.setTooltip(new Tooltip(tooltip));
        return b;
    }
}

View File

@ -0,0 +1,34 @@
package p.studio.workspaces.editor;
import javafx.scene.Node;
import javafx.scene.layout.BorderPane;
import org.fxmisc.richtext.CodeArea;
import org.fxmisc.richtext.LineNumberFactory;
import p.studio.utilities.i18n.I18n;
import p.studio.workspaces.Workspace;
import p.studio.workspaces.WorkspaceId;
/** Code-editing workspace: toolbar on top, a RichTextFX code area in the center. */
public final class EditorWorkspace implements Workspace {

    private final BorderPane root = new BorderPane();
    private final EditorToolbar toolbar = new EditorToolbar();
    private final CodeArea codeArea = new CodeArea();

    public EditorWorkspace() {
        // Line-number gutter supplied by RichTextFX.
        codeArea.setParagraphGraphicFactory(LineNumberFactory.get(codeArea));
        // Seed content shown when the editor first opens.
        codeArea.replaceText("""
                fn frame(): void
                {
                // hello Prometeu
                }
                """);
        root.setTop(toolbar);
        root.setCenter(codeArea);
    }

    @Override public WorkspaceId id() { return WorkspaceId.EDITOR; }
    @Override public I18n title() { return I18n.WORKSPACE_CODE; }
    @Override public Node root() { return root; }

    /** Direct access to the underlying editor control. */
    public CodeArea codeArea() { return codeArea; }
}

View File

@ -0,0 +1,16 @@
app.title=Prometeu Studio
menu.file=File
menu.file.newProject=New Project
menu.file.open=Open
menu.file.save=Save
menu.edit=Edit
menu.view=View
menu.help=Help
toolbar.play=Play
toolbar.stop=Stop
toolbar.export=Export
workspace.code=Code
workspace.assets=Assets
workspace.debug=Debug

View File

@ -0,0 +1,16 @@
app.title=Prometeu Studio
menu.file=Arquivo
menu.file.newProject=Novo Projeto
menu.file.open=Abrir
menu.file.save=Salvar
menu.edit=Editar
menu.view=Visualizar
menu.help=Ajuda
toolbar.play=Executar
toolbar.stop=Parar
toolbar.export=Exportar
workspace.code=Código
workspace.assets=Assets
workspace.debug=Depurar

View File

@ -0,0 +1,36 @@
.root {
-fx-font-family: "Inter", "Segoe UI", sans-serif;
-fx-base: #1e1e1e;
-fx-background: #1e1e1e;
}
.main-toolbar {
-fx-background-color: #252526;
-fx-border-color: #2d2d2d;
-fx-border-width: 0 0 1 0;
}
.toolbar-button {
-fx-background-color: transparent;
-fx-text-fill: #d4d4d4;
-fx-font-size: 14px;
-fx-padding: 6 10 6 10;
-fx-background-radius: 6;
}
.toolbar-button:hover {
-fx-background-color: #2a2d2e;
}
.toolbar-button:pressed {
-fx-background-color: #37373d;
}
.toolbar-button.accent {
-fx-background-color: #0e639c;
-fx-text-fill: white;
}
.toolbar-button.accent:hover {
-fx-background-color: #1177bb;
}

View File

@ -0,0 +1,14 @@
/*
* This source file was generated by the Gradle 'init' task
*/
package p.studio.app;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
// NOTE(review): leftover scaffolding from the Gradle 'init' task — it
// references a MessageUtils class that is not among this commit's visible
// sources; confirm MessageUtils still exists or delete this test.
class MessageUtilsTest {
    @Test void testGetMessage() {
        assertEquals("Hello World!", MessageUtils.getMessage());
    }
}

Some files were not shown because too many files have changed in this diff Show More