//! Coarse project rebuild loop for the LSP server: runs a minimal frontend
//! pass over every open file, collects diagnostics and top-level symbols,
//! and publishes the results to the client.
use tokio_util::sync::CancellationToken;
|
|
use tower_lsp::Client;
|
|
|
|
use crate::analysis_db::{AnalysisSnapshot, SharedDb};
|
|
use prometeu_analysis::{TextIndex};
|
|
use tower_lsp::lsp_types as lsp;
|
|
use prometeu_analysis::ids::FileId;
|
|
use crate::rebuild::compiler_bridge::{ParserFacade, Severity};
|
|
|
|
/// A diagnostic in LSP-ready form, stored in the analysis snapshot and
/// converted to `lsp::Diagnostic` at publish time.
#[derive(Clone, Debug)]
pub struct LspDiagnostic {
    // Source range (LSP line/character positions) the diagnostic applies to.
    pub range: lsp::Range,
    // Severity (error/warning); `None` when the producer did not classify it.
    pub severity: Option<lsp::DiagnosticSeverity>,
    // Machine-readable diagnostic code (e.g. a compiler error code), if any.
    pub code: Option<lsp::NumberOrString>,
    // Human-readable message shown to the user.
    pub message: String,
}
/// A single top-level symbol, flattened for symbol queries.
#[derive(Clone, Debug)]
pub struct FlatSymbol {
    // Symbol name as written in source.
    pub name: String,
    // LSP symbol kind mapped from the compiler's symbol kind.
    pub kind: lsp::SymbolKind,
    // Location (URI + declaration range) of the symbol.
    pub location: lsp::Location,
}
/// Requests a project rebuild (coarse). Cancels the previous rebuild if in progress.
|
|
pub async fn request_rebuild(db: SharedDb, client: Client) {
|
|
// 1) short lock: cancel previous token and install a new one
|
|
let new_token = CancellationToken::new();
|
|
{
|
|
let mut guard = db.write().await;
|
|
if let Some(prev) = guard.active_rebuild.take() {
|
|
prev.cancel();
|
|
}
|
|
guard.active_rebuild = Some(new_token.clone());
|
|
}
|
|
|
|
// 2) spawn task: run analysis outside the lock
|
|
tokio::spawn(async move {
|
|
// Safe point: check before starting
|
|
if new_token.is_cancelled() { return; }
|
|
|
|
// Clone snapshot of files (URIs and texts) under a short read lock
|
|
let (files, revision) = {
|
|
let guard = db.read().await;
|
|
let mut v = Vec::new();
|
|
for fid in guard.file_ids() {
|
|
let uri = guard.file_db.uri(fid).to_string();
|
|
let text = guard.file_db.text(fid).to_string();
|
|
v.push((fid, uri, text));
|
|
}
|
|
(v, guard.revision)
|
|
};
|
|
|
|
// Prepare accumulators
|
|
let mut diagnostics_by_uri: std::collections::HashMap<String, Vec<LspDiagnostic>> = std::collections::HashMap::new();
|
|
let mut symbols_flat: Vec<FlatSymbol> = Vec::new();
|
|
|
|
// For each file: run a minimal frontend to collect diagnostics and top-level symbols
|
|
for (fid, uri, text) in files.into_iter() {
|
|
if new_token.is_cancelled() { return; }
|
|
let text_index = TextIndex::new(&text);
|
|
|
|
// Parser + basic pipeline
|
|
let mut interner = prometeu_analysis::NameInterner::new();
|
|
let mut parser = ParserFacade::new(&text, fid, &mut interner);
|
|
match parser.parse_and_collect() {
|
|
Ok(parsed) => {
|
|
// Diagnostics (from parse/collect are already inside parsed.diags)
|
|
let mut file_diags = Vec::new();
|
|
for d in parsed.diagnostics {
|
|
let range = span_to_range(fid, &text_index, d.span.start, d.span.end);
|
|
file_diags.push(LspDiagnostic {
|
|
range,
|
|
severity: Some(match d.severity { Severity::Error => lsp::DiagnosticSeverity::ERROR, Severity::Warning => lsp::DiagnosticSeverity::WARNING }),
|
|
code: Some(lsp::NumberOrString::String(d.code)),
|
|
message: d.message,
|
|
});
|
|
}
|
|
diagnostics_by_uri.insert(uri.clone(), file_diags);
|
|
|
|
// Symbols: flatten only top-level decls with their decl_span
|
|
for sym in parsed.symbols {
|
|
let lsp_loc = lsp::Location {
|
|
uri: uri.parse().unwrap_or_else(|_| lsp::Url::parse("untitled:").unwrap()),
|
|
range: span_to_range(fid, &text_index, sym.decl_span.start, sym.decl_span.end),
|
|
};
|
|
let kind = match sym.kind {
|
|
prometeu_compiler::analysis::symbols::SymbolKind::Function => lsp::SymbolKind::FUNCTION,
|
|
prometeu_compiler::analysis::symbols::SymbolKind::Service => lsp::SymbolKind::INTERFACE,
|
|
prometeu_compiler::analysis::symbols::SymbolKind::Struct => lsp::SymbolKind::STRUCT,
|
|
prometeu_compiler::analysis::symbols::SymbolKind::Contract => lsp::SymbolKind::CLASS,
|
|
prometeu_compiler::analysis::symbols::SymbolKind::ErrorType => lsp::SymbolKind::ENUM,
|
|
_ => lsp::SymbolKind::VARIABLE,
|
|
};
|
|
symbols_flat.push(FlatSymbol { name: sym.name, kind, location: lsp_loc });
|
|
}
|
|
}
|
|
Err(diags) => {
|
|
// Parser returned errors only; publish them
|
|
let mut file_diags = Vec::new();
|
|
for d in diags {
|
|
let range = span_to_range(fid, &text_index, d.span.start, d.span.end);
|
|
file_diags.push(LspDiagnostic {
|
|
range,
|
|
severity: Some(match d.severity { Severity::Error => lsp::DiagnosticSeverity::ERROR, Severity::Warning => lsp::DiagnosticSeverity::WARNING }),
|
|
code: Some(lsp::NumberOrString::String(d.code)),
|
|
message: d.message,
|
|
});
|
|
}
|
|
diagnostics_by_uri.insert(uri.clone(), file_diags);
|
|
}
|
|
}
|
|
}
|
|
|
|
if new_token.is_cancelled() { return; }
|
|
|
|
// 3) short lock: swap state + revision++ if not cancelled; then publish diagnostics
|
|
let snapshot = AnalysisSnapshot { diagnostics_by_uri: diagnostics_by_uri.clone(), symbols_flat };
|
|
{
|
|
let mut guard = db.write().await;
|
|
if new_token.is_cancelled() { return; }
|
|
// if no new changes since we started, accept this snapshot
|
|
guard.last_good = Some(snapshot);
|
|
guard.revision = revision.saturating_add(1);
|
|
}
|
|
|
|
// Publish diagnostics per file
|
|
for (uri, diags) in diagnostics_by_uri.into_iter() {
|
|
let lsp_diags: Vec<lsp::Diagnostic> = diags.into_iter().map(|d| lsp::Diagnostic {
|
|
range: d.range,
|
|
severity: d.severity,
|
|
code: d.code,
|
|
message: d.message,
|
|
..Default::default()
|
|
}).collect();
|
|
let _ = client.publish_diagnostics(uri.parse().unwrap_or_else(|_| lsp::Url::parse("untitled:").unwrap()), lsp_diags, None).await;
|
|
}
|
|
});
|
|
}
|
|
|
|
fn span_to_range(file: FileId, idx: &TextIndex, start: u32, end: u32) -> lsp::Range {
|
|
// Ignore `file` here since idx is built from that file's text
|
|
let (s_line, s_col) = idx.byte_to_lsp(start);
|
|
let (e_line, e_col) = idx.byte_to_lsp(end);
|
|
lsp::Range {
|
|
start: lsp::Position { line: s_line, character: s_col },
|
|
end: lsp::Position { line: e_line, character: e_col },
|
|
}
|
|
}
|
|
|
|
/// Minimal integration with the compiler frontend for the MVP rebuild loop.
|
|
mod compiler_bridge {
|
|
use super::*;
|
|
use prometeu_compiler::frontends::pbs as p;
|
|
use prometeu_compiler::common::spans as cspans;
|
|
use prometeu_compiler::common::diagnostics as cdiag;
|
|
|
|
#[derive(Clone, Debug)]
|
|
pub enum Severity { Error, Warning }
|
|
|
|
#[derive(Clone, Debug)]
|
|
pub struct Diag { pub severity: Severity, pub code: String, pub message: String, pub span: cspans::Span }
|
|
|
|
#[derive(Clone, Debug)]
|
|
pub struct SymbolItem { pub name: String, pub kind: prometeu_compiler::analysis::symbols::SymbolKind, pub decl_span: cspans::Span }
|
|
|
|
#[derive(Clone, Debug)]
|
|
pub struct ParsedResult { pub diagnostics: Vec<Diag>, pub symbols: Vec<SymbolItem> }
|
|
|
|
pub struct ParserFacade<'a> {
|
|
text: &'a str,
|
|
file_id: FileId,
|
|
interner: &'a mut prometeu_analysis::NameInterner,
|
|
}
|
|
|
|
impl<'a> ParserFacade<'a> {
|
|
pub fn new(text: &'a str, file_id: FileId, interner: &'a mut prometeu_analysis::NameInterner) -> Self {
|
|
Self { text, file_id, interner }
|
|
}
|
|
|
|
pub fn parse_and_collect(&mut self) -> Result<ParsedResult, Vec<Diag>> {
|
|
let mut parser = p::parser::Parser::new(self.text, cspans::FileId(self.file_id.0), self.interner);
|
|
let parsed = match parser.parse_file() {
|
|
Ok(p) => p,
|
|
Err(bundle) => {
|
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
|
return Err(diags);
|
|
}
|
|
};
|
|
|
|
let mut collector = p::collector::SymbolCollector::new(self.interner);
|
|
let (type_symbols, value_symbols) = match collector.collect(&parsed.arena, parsed.root) {
|
|
Ok(v) => v,
|
|
Err(bundle) => {
|
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
|
return Err(diags);
|
|
}
|
|
};
|
|
let module_symbols = p::symbols::ModuleSymbols { type_symbols, value_symbols };
|
|
|
|
struct EmptyProvider;
|
|
impl p::resolver::ModuleProvider for EmptyProvider { fn get_module_symbols(&self, _from_path: &str) -> Option<&p::symbols::ModuleSymbols> { None } }
|
|
let mut resolver = p::resolver::Resolver::new(&module_symbols, &EmptyProvider, self.interner);
|
|
// bootstrap primitives using a throwaway interner behavior
|
|
resolver.bootstrap_types(self.interner);
|
|
if let Err(bundle) = resolver.resolve(&parsed.arena, parsed.root) {
|
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
|
return Err(diags);
|
|
}
|
|
|
|
// Collect top-level symbols only for MVP
|
|
let mut symbols = Vec::new();
|
|
for s in &resolver.symbol_arena.symbols {
|
|
// Keep only decls in this file
|
|
if s.decl_span.file.0 == self.file_id.0 {
|
|
let name = self.interner.resolve(s.name).to_string();
|
|
let kind = s.kind;
|
|
symbols.push(SymbolItem { name, kind, decl_span: s.decl_span.clone() });
|
|
}
|
|
}
|
|
|
|
Ok(ParsedResult { diagnostics: vec![], symbols })
|
|
}
|
|
}
|
|
}
|