implements PLN-0024

This commit is contained in:
bQUARKz 2026-03-31 17:14:57 +01:00
parent 8dd3b58f39
commit bcc89dcfbd
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
25 changed files with 1049 additions and 13 deletions

View File

@ -11,4 +11,4 @@
{"type":"discussion","id":"DSC-0010","status":"done","ticket":"studio-code-editor-workspace-foundations","title":"Establish Code Editor workspace foundations in Studio without LSP","created_at":"2026-03-30","updated_at":"2026-03-31","tags":["studio","editor","workspace","multi-frontend","lsp-deferred"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0026","file":"discussion/lessons/DSC-0010-studio-code-editor-workspace-foundations/LSN-0026-read-only-editor-foundations-and-semantic-deferral.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31"}]}
{"type":"discussion","id":"DSC-0011","status":"done","ticket":"compiler-analyze-compile-build-pipeline-split","title":"Split compiler pipeline into analyze, compile, and build entrypoints","created_at":"2026-03-30","updated_at":"2026-03-30","tags":["compiler","pipeline","artifacts","build","analysis"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0025","file":"discussion/lessons/DSC-0011-compiler-analyze-compile-build-pipeline-split/LSN-0025-compiler-pipeline-entrypoints-and-result-boundaries.md","status":"done","created_at":"2026-03-30","updated_at":"2026-03-30"}]}
{"type":"discussion","id":"DSC-0012","status":"done","ticket":"studio-editor-document-vfs-boundary","title":"Definir um boundary de VFS documental para tree/view/open files no Code Editor do Studio","created_at":"2026-03-31","updated_at":"2026-03-31","tags":["studio","editor","workspace","vfs","filesystem","boundary"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0027","file":"discussion/lessons/DSC-0012-studio-editor-document-vfs-boundary/LSN-0027-project-document-vfs-and-session-owned-editor-boundary.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31"}]}
{"type":"discussion","id":"DSC-0013","status":"open","ticket":"studio-editor-write-wave-supported-non-frontend-files","title":"Definir a wave inicial de edicao no Code Editor apenas para arquivos aceitos e nao relacionados ao FE","created_at":"2026-03-31","updated_at":"2026-03-31","tags":["studio","editor","workspace","write","read-only","vfs","frontend-boundary"],"agendas":[{"id":"AGD-0013","file":"AGD-0013-studio-editor-write-wave-supported-non-frontend-files.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31"},{"id":"AGD-0014","file":"AGD-0014-studio-editor-frontend-edit-rights.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31"}],"decisions":[{"id":"DEC-0010","file":"DEC-0010-studio-controlled-non-frontend-editor-write-wave.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31","ref_agenda":"AGD-0013"},{"id":"DEC-0011","file":"DEC-0011-studio-frontend-read-only-minimum-lsp-phase.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31","ref_agenda":"AGD-0014"}],"plans":[{"id":"PLN-0019","file":"PLN-0019-propagate-dec-0010-into-studio-and-vfs-specs.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0020","file":"PLN-0020-build-dec-0010-vfs-access-policy-and-save-core.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0021","file":"PLN-0021-integrate-dec-0010-editor-write-ui-and-workflow.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0022","file":"PLN-0022-propagate-dec-0011-into-studio-vfs-and-lsp-specs.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]},{"id":"PLN-0023","file":"PLN-0023-scaffold-flat-packed-prometeu-lsp-api-and-session-seams.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]}
,{"id":"PLN-0024","file":"PLN-0024-implement-read-only-fe-diagnostics-symbols-and-definition.md","status":"review","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]},{"id":"PLN-0025","file":"PLN-0025-implement-fe-semantic-highlight-consumption.md","status":"review","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]}],"lessons":[]}
{"type":"discussion","id":"DSC-0013","status":"open","ticket":"studio-editor-write-wave-supported-non-frontend-files","title":"Definir a wave inicial de edicao no Code Editor apenas para arquivos aceitos e nao relacionados ao FE","created_at":"2026-03-31","updated_at":"2026-03-31","tags":["studio","editor","workspace","write","read-only","vfs","frontend-boundary"],"agendas":[{"id":"AGD-0013","file":"AGD-0013-studio-editor-write-wave-supported-non-frontend-files.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31"},{"id":"AGD-0014","file":"AGD-0014-studio-editor-frontend-edit-rights.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31"}],"decisions":[{"id":"DEC-0010","file":"DEC-0010-studio-controlled-non-frontend-editor-write-wave.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31","ref_agenda":"AGD-0013"},{"id":"DEC-0011","file":"DEC-0011-studio-frontend-read-only-minimum-lsp-phase.md","status":"accepted","created_at":"2026-03-31","updated_at":"2026-03-31","ref_agenda":"AGD-0014"}],"plans":[{"id":"PLN-0019","file":"PLN-0019-propagate-dec-0010-into-studio-and-vfs-specs.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0020","file":"PLN-0020-build-dec-0010-vfs-access-policy-and-save-core.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0021","file":"PLN-0021-integrate-dec-0010-editor-write-ui-and-workflow.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0010"]},{"id":"PLN-0022","file":"PLN-0022-propagate-dec-0011-into-studio-vfs-and-lsp-specs.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]},{"id":"PLN-0023","file":"PLN-0023-scaffold-flat-packed-prometeu-lsp-api-and-session-seams.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]}
,{"id":"PLN-0024","file":"PLN-0024-implement-read-only-fe-diagnostics-symbols-and-definition.md","status":"done","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]},{"id":"PLN-0025","file":"PLN-0025-implement-fe-semantic-highlight-consumption.md","status":"review","created_at":"2026-03-31","updated_at":"2026-03-31","ref_decisions":["DEC-0011"]}],"lessons":[]}

View File

@ -2,9 +2,9 @@
id: PLN-0024
ticket: studio-editor-write-wave-supported-non-frontend-files
title: Implement read-only FE diagnostics, symbols, and definition over VFS snapshots
status: review
status: done
created: 2026-03-31
completed:
completed: 2026-03-31
tags: [studio, lsp, diagnostics, symbols, definition, frontend]
---

View File

@ -1,6 +1,10 @@
package p.lsp;
import p.lsp.dtos.PrometeuLspSessionStateDTO;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.lsp.messages.PrometeuLspDefinitionResult;
import p.studio.vfs.ProjectDocumentVfs;
public interface PrometeuLspService extends AutoCloseable {
@ -10,6 +14,10 @@ public interface PrometeuLspService extends AutoCloseable {
PrometeuLspSessionStateDTO snapshot();
PrometeuLspAnalyzeDocumentResult analyzeDocument(PrometeuLspAnalyzeDocumentRequest request);
PrometeuLspDefinitionResult definition(PrometeuLspDefinitionRequest request);
@Override
default void close() {
}

View File

@ -0,0 +1,16 @@
package p.lsp.dtos;
import java.nio.file.Path;
import java.util.Objects;
/**
 * A single go-to-definition target: the declaration site of a named symbol.
 *
 * @param name         symbol name at the declaration
 * @param documentPath owning document; stored in absolute, normalized form
 * @param range        offset range of the declaration inside the document
 */
public record PrometeuLspDefinitionTargetDTO(
        String name,
        Path documentPath,
        PrometeuLspRangeDTO range) {
    public PrometeuLspDefinitionTargetDTO {
        final Path rawPath = Objects.requireNonNull(documentPath, "documentPath");
        name = Objects.requireNonNull(name, "name");
        range = Objects.requireNonNull(range, "range");
        // Canonical path form keeps lookups keyed by document path stable.
        documentPath = rawPath.toAbsolutePath().normalize();
    }
}

View File

@ -0,0 +1,22 @@
package p.lsp.dtos;
import java.nio.file.Path;
import java.util.Objects;
/**
 * A single diagnostic attached to a document offset range.
 *
 * @param documentPath owning document; stored in absolute, normalized form
 * @param range        offset range the diagnostic covers
 * @param severity     ERROR or WARNING
 * @param phase        producing phase label; empty string when absent
 * @param code         diagnostic code; empty string when absent
 * @param message      human-readable description (required)
 */
public record PrometeuLspDiagnosticDTO(
        Path documentPath,
        PrometeuLspRangeDTO range,
        PrometeuLspDiagnosticSeverityDTO severity,
        String phase,
        String code,
        String message) {
    public PrometeuLspDiagnosticDTO {
        final Path rawPath = Objects.requireNonNull(documentPath, "documentPath");
        documentPath = rawPath.toAbsolutePath().normalize();
        range = Objects.requireNonNull(range, "range");
        severity = Objects.requireNonNull(severity, "severity");
        message = Objects.requireNonNull(message, "message");
        // Optional metadata degrades to the empty string rather than null.
        phase = Objects.requireNonNullElse(phase, "");
        code = Objects.requireNonNullElse(code, "");
    }
}

View File

@ -0,0 +1,6 @@
package p.lsp.dtos;
/**
 * Severity levels surfaced for LSP diagnostics.
 *
 * Only two levels exist in this wave; producers map "is error" to ERROR and
 * everything else to WARNING. Do not reorder: ordinal stability is not
 * guaranteed to be unused by consumers — TODO confirm before adding levels.
 */
public enum PrometeuLspDiagnosticSeverityDTO {
ERROR,
WARNING
}

View File

@ -0,0 +1,15 @@
package p.lsp.dtos;
/**
 * Half-open character-offset range [startOffset, endOffset).
 *
 * The compact constructor clamps negative starts to zero and clamps the end
 * so it is never before the start (a collapsed range is allowed and empty).
 */
public record PrometeuLspRangeDTO(
        int startOffset,
        int endOffset) {
    public PrometeuLspRangeDTO {
        if (startOffset < 0) {
            startOffset = 0;
        }
        if (endOffset < startOffset) {
            endOffset = startOffset;
        }
    }

    /** True when {@code offset} falls inside the half-open range. */
    public boolean contains(final int offset) {
        return offset >= startOffset && offset < endOffset;
    }
}

View File

@ -0,0 +1,21 @@
package p.lsp.dtos;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
/**
 * An indexed symbol (possibly with nested member symbols).
 *
 * @param name         symbol name
 * @param kind         symbol classification
 * @param documentPath owning document; stored in absolute, normalized form
 * @param range        offset range of the declaration
 * @param children     nested symbols; snapshotted into an immutable list
 */
public record PrometeuLspSymbolDTO(
        String name,
        PrometeuLspSymbolKindDTO kind,
        Path documentPath,
        PrometeuLspRangeDTO range,
        List<PrometeuLspSymbolDTO> children) {
    public PrometeuLspSymbolDTO {
        name = Objects.requireNonNull(name, "name");
        kind = Objects.requireNonNull(kind, "kind");
        final Path rawPath = Objects.requireNonNull(documentPath, "documentPath");
        documentPath = rawPath.toAbsolutePath().normalize();
        range = Objects.requireNonNull(range, "range");
        // Defensive immutable snapshot; List.copyOf also rejects null elements.
        children = List.copyOf(Objects.requireNonNull(children, "children"));
    }
}

View File

@ -0,0 +1,19 @@
package p.lsp.dtos;
/**
 * Classification of indexed symbols, mirroring the PBS top-level declaration
 * kinds plus member kinds (METHOD, CONSTRUCTOR) and a catch-all UNKNOWN.
 * Do not reorder constants: ordinal stability may matter to serialized
 * consumers — TODO confirm before rearranging.
 */
public enum PrometeuLspSymbolKindDTO {
FUNCTION,
METHOD,
CONSTRUCTOR,
GLOBAL,
CONST,
STRUCT,
CONTRACT,
HOST,
BUILTIN_TYPE,
SERVICE,
ERROR,
ENUM,
CALLBACK,
IMPLEMENTS,
UNKNOWN
}

View File

@ -1,13 +1,22 @@
package p.lsp.messages;
import p.lsp.dtos.PrometeuLspDiagnosticDTO;
import p.lsp.dtos.PrometeuLspSessionStateDTO;
import p.lsp.dtos.PrometeuLspSymbolDTO;
import java.util.List;
import java.util.Objects;
public record PrometeuLspAnalyzeDocumentResult(
PrometeuLspSessionStateDTO sessionState) {
PrometeuLspSessionStateDTO sessionState,
List<PrometeuLspDiagnosticDTO> diagnostics,
List<PrometeuLspSymbolDTO> documentSymbols,
List<PrometeuLspSymbolDTO> workspaceSymbols) {
public PrometeuLspAnalyzeDocumentResult {
Objects.requireNonNull(sessionState, "sessionState");
diagnostics = List.copyOf(Objects.requireNonNull(diagnostics, "diagnostics"));
documentSymbols = List.copyOf(Objects.requireNonNull(documentSymbols, "documentSymbols"));
workspaceSymbols = List.copyOf(Objects.requireNonNull(workspaceSymbols, "workspaceSymbols"));
}
}

View File

@ -0,0 +1,14 @@
package p.lsp.messages;
import java.nio.file.Path;
import java.util.Objects;
/**
 * Go-to-definition request: a document plus a character offset inside it.
 *
 * @param documentPath document to query; stored absolute and normalized
 * @param offset       character offset; negative values are clamped to zero
 */
public record PrometeuLspDefinitionRequest(
        Path documentPath,
        int offset) {
    public PrometeuLspDefinitionRequest {
        final Path rawPath = Objects.requireNonNull(documentPath, "documentPath");
        documentPath = rawPath.toAbsolutePath().normalize();
        offset = offset < 0 ? 0 : offset;
    }
}

View File

@ -0,0 +1,19 @@
package p.lsp.messages;
import p.lsp.dtos.PrometeuLspDefinitionTargetDTO;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
/**
 * Go-to-definition response echoing the query position and listing the
 * resolved targets (possibly empty when nothing matched).
 *
 * @param documentPath queried document; stored absolute and normalized
 * @param offset       queried offset; negative values are clamped to zero
 * @param targets      resolved targets; snapshotted into an immutable list
 */
public record PrometeuLspDefinitionResult(
        Path documentPath,
        int offset,
        List<PrometeuLspDefinitionTargetDTO> targets) {
    public PrometeuLspDefinitionResult {
        final Path rawPath = Objects.requireNonNull(documentPath, "documentPath");
        documentPath = rawPath.toAbsolutePath().normalize();
        offset = offset < 0 ? 0 : offset;
        targets = List.copyOf(Objects.requireNonNull(targets, "targets"));
    }
}

View File

@ -5,4 +5,12 @@ plugins {
dependencies {
implementation(project(":prometeu-lsp:prometeu-lsp-api"))
implementation(project(":prometeu-vfs:prometeu-vfs-api"))
implementation(project(":prometeu-compiler:prometeu-compiler-core"))
implementation(project(":prometeu-compiler:prometeu-frontend-api"))
implementation(project(":prometeu-compiler:prometeu-frontend-registry"))
implementation(project(":prometeu-compiler:prometeu-deps"))
implementation(project(":prometeu-compiler:prometeu-build-pipeline"))
implementation(project(":prometeu-compiler:frontends:prometeu-frontend-pbs"))
testImplementation(project(":prometeu-vfs:prometeu-vfs-v1"))
}

View File

@ -0,0 +1,430 @@
package p.lsp.v1.internal;
import p.lsp.PrometeuLspProjectContext;
import p.lsp.dtos.*;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.lsp.messages.PrometeuLspDefinitionResult;
import p.studio.compiler.FrontendRegistryService;
import p.studio.compiler.messages.BuildingIssue;
import p.studio.compiler.messages.BuildingIssueSink;
import p.studio.compiler.messages.BuilderPipelineConfig;
import p.studio.compiler.messages.FESurfaceContext;
import p.studio.compiler.messages.FrontendPhaseContext;
import p.studio.compiler.messages.HostAdmissionContext;
import p.studio.compiler.models.AnalysisSnapshot;
import p.studio.compiler.models.BuilderPipelineContext;
import p.studio.compiler.models.SourceHandle;
import p.studio.compiler.pbs.ast.PbsAst;
import p.studio.compiler.pbs.lexer.PbsLexer;
import p.studio.compiler.pbs.lexer.PbsToken;
import p.studio.compiler.pbs.lexer.PbsTokenKind;
import p.studio.compiler.pbs.parser.PbsParser;
import p.studio.compiler.source.Span;
import p.studio.compiler.source.diagnostics.Diagnostic;
import p.studio.compiler.source.diagnostics.DiagnosticSink;
import p.studio.compiler.source.identifiers.FileId;
import p.studio.compiler.workspaces.AssetSurfaceContextLoader;
import p.studio.compiler.workspaces.PipelineStage;
import p.studio.compiler.workspaces.stages.LoadSourcesPipelineStage;
import p.studio.compiler.workspaces.stages.ResolveDepsPipelineStage;
import p.studio.utilities.logs.LogAggregator;
import p.studio.vfs.ProjectDocumentVfs;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
/**
 * Stateless read-only semantic phase behind the V1 LSP service.
 *
 * Every request rebuilds a fresh {@code SemanticSession}: the builder pipeline
 * is run over the project (with a VFS overlay source provider for the
 * requested document), all PBS sources are lexed and parsed, and symbols and
 * tokens are indexed per document. Nothing is cached between calls.
 */
final class PrometeuLspSemanticReadPhase {

    // Static utility holder; never instantiated.
    private PrometeuLspSemanticReadPhase() {
    }

    /**
     * Returns diagnostics and document symbols scoped to the requested
     * document, plus the workspace-wide symbol list, along with a session
     * state advertising the supported capabilities.
     */
    static PrometeuLspAnalyzeDocumentResult analyze(
            final PrometeuLspProjectContext projectContext,
            final ProjectDocumentVfs projectDocumentVfs,
            final PrometeuLspAnalyzeDocumentRequest request) {
        final SemanticSession session = buildSession(projectContext, projectDocumentVfs, request.documentPath());
        final Path normalizedRequestedDocument = normalize(request.documentPath());
        return new PrometeuLspAnalyzeDocumentResult(
                new PrometeuLspSessionStateDTO(true, List.of("diagnostics", "symbols", "definition", "highlight")),
                session.diagnosticsByDocument().getOrDefault(normalizedRequestedDocument, List.of()),
                session.documentSymbolsByDocument().getOrDefault(normalizedRequestedDocument, List.of()),
                session.workspaceSymbols());
    }

    /**
     * Resolves go-to-definition targets for the token at the request offset.
     * The full session is rebuilt before resolution (no caching).
     */
    static PrometeuLspDefinitionResult definition(
            final PrometeuLspProjectContext projectContext,
            final ProjectDocumentVfs projectDocumentVfs,
            final PrometeuLspDefinitionRequest request) {
        final SemanticSession session = buildSession(projectContext, projectDocumentVfs, request.documentPath());
        final List<PrometeuLspDefinitionTargetDTO> targets = resolveDefinitionTargets(session, request);
        return new PrometeuLspDefinitionResult(request.documentPath(), request.offset(), targets);
    }

    /**
     * Configures and runs the analysis pipeline, then indexes the result.
     * The VFS overlay factory makes the requested document read through the
     * VFS (so unsaved editor content is analyzed) while other files fall back
     * to the filesystem.
     */
    private static SemanticSession buildSession(
            final PrometeuLspProjectContext projectContext,
            final ProjectDocumentVfs projectDocumentVfs,
            final Path requestedDocumentPath) {
        final BuilderPipelineConfig config = new BuilderPipelineConfig(
                false,
                projectContext.rootPath().toString(),
                "core-v1",
                new PrometeuLspVfsOverlaySourceProviderFactory(projectDocumentVfs, requestedDocumentPath));
        final BuilderPipelineContext context = BuilderPipelineContext.fromConfig(config);
        final AnalysisRuntimeSnapshot snapshot = runAnalysisStages(context);
        return index(snapshot, requestedDocumentPath);
    }

    /**
     * Runs resolve-deps then load-sources, stopping at the first stage that
     * reports errors, then runs the frontend phase when a workspace and file
     * table were successfully produced. Building issues and frontend
     * diagnostics are collected separately.
     */
    private static AnalysisRuntimeSnapshot runAnalysisStages(final BuilderPipelineContext context) {
        final LogAggregator logs = LogAggregator.empty();
        final List<BuildingIssue> diagnostics = new ArrayList<>();
        final List<PipelineStage> stages = List.of(
                new ResolveDepsPipelineStage(),
                new LoadSourcesPipelineStage());
        for (final PipelineStage stage : stages) {
            final BuildingIssueSink stageIssues = stage.run(context, logs);
            diagnostics.addAll(stageIssues.asCollection());
            // A failing stage leaves later stages unrunnable; keep its issues
            // and stop the pipeline here.
            if (stageIssues.hasErrors()) {
                break;
            }
        }
        final DiagnosticSink frontendDiagnostics = DiagnosticSink.empty();
        // Frontend compilation only makes sense with a resolved workspace and
        // a populated file table.
        if (context.resolvedWorkspace != null && context.fileTable != null) {
            final BuildingIssueSink frontendIssues = runFrontendPhase(context, logs, frontendDiagnostics);
            diagnostics.addAll(frontendIssues.asCollection());
        }
        return new AnalysisRuntimeSnapshot(
                new AnalysisSnapshot(
                        diagnostics,
                        context.resolvedWorkspace,
                        context.fileTable,
                        context.irBackend),
                List.copyOf(frontendDiagnostics.asCollection()));
    }

    /**
     * Looks up the frontend phase service for the workspace's language and
     * runs it; reports a building error if no service is registered. Frontend
     * diagnostics are written into the caller-provided sink; building-level
     * issues are returned.
     */
    private static BuildingIssueSink runFrontendPhase(
            final BuilderPipelineContext context,
            final LogAggregator logs,
            final DiagnosticSink frontendDiagnostics) {
        final var frontendSpec = context.resolvedWorkspace.frontendSpec();
        final var service = FrontendRegistryService.getFrontendPhaseService(frontendSpec.getLanguageId());
        if (service.isEmpty()) {
            return BuildingIssueSink.empty().report(builder -> builder
                    .error(true)
                    .message("[BUILD]: unable to find a service for frontend phase: " + frontendSpec.getLanguageId()));
        }
        final FESurfaceContext feSurfaceContext = new AssetSurfaceContextLoader().load(context.resolvedWorkspace.mainProject().getRootPath());
        final FrontendPhaseContext frontendPhaseContext = new FrontendPhaseContext(
                context.resolvedWorkspace.graph().projectTable(),
                context.fileTable,
                context.resolvedWorkspace.stack(),
                context.resolvedWorkspace.stdlib(),
                HostAdmissionContext.permissiveDefault(),
                feSurfaceContext);
        final BuildingIssueSink issues = BuildingIssueSink.empty();
        // Side effect: the compiled IR backend is stored on the pipeline
        // context for the snapshot taken by runAnalysisStages.
        context.irBackend = service.get().compile(frontendPhaseContext, frontendDiagnostics, logs, issues);
        return issues;
    }

    /**
     * Builds the per-document diagnostic map and, when a file table exists,
     * lexes/parses every PBS source to populate the symbol and token indexes.
     * Without a file table an empty-index session is returned (diagnostics
     * from the pipeline are still kept).
     */
    private static SemanticSession index(
            final AnalysisRuntimeSnapshot runtimeSnapshot,
            final Path requestedDocumentPath) {
        final AnalysisSnapshot snapshot = runtimeSnapshot.analysisSnapshot();
        final Map<Path, List<PrometeuLspDiagnosticDTO>> diagnosticsByDocument = diagnosticsByDocument(
                snapshot.diagnostics(),
                snapshot,
                runtimeSnapshot.frontendDiagnostics());
        final SemanticIndex semanticIndex = new SemanticIndex();
        if (snapshot.fileTable() == null) {
            return new SemanticSession(
                    normalize(requestedDocumentPath),
                    diagnosticsByDocument,
                    Map.of(),
                    List.of(),
                    Map.of(),
                    Map.of());
        }
        for (final FileId fileId : snapshot.fileTable()) {
            final SourceHandle sourceHandle = snapshot.fileTable().get(fileId);
            if (!isPbsSource(sourceHandle)) {
                continue;
            }
            // Unreadable sources fall back to an empty string rather than failing.
            final String source = sourceHandle.readUtf8().orElse("");
            final DiagnosticSink diagnostics = DiagnosticSink.empty();
            final var tokens = PbsLexer.lex(source, fileId, diagnostics);
            final PbsAst.File ast = PbsParser.parse(tokens, fileId, diagnostics, PbsParser.ParseMode.ORDINARY);
            // NOTE(review): the lexer/parser diagnostics collected in the local
            // sink above are never surfaced — confirm this is intentional
            // (pipeline/frontend diagnostics are the only ones reported).
            semanticIndex.index(sourceHandle.getCanonPath(), ast, tokens.asList());
        }
        return new SemanticSession(
                normalize(requestedDocumentPath),
                diagnosticsByDocument,
                semanticIndex.documentSymbolsByDocument(),
                semanticIndex.workspaceSymbols(),
                semanticIndex.symbolsByName(),
                semanticIndex.tokensByDocument());
    }

    /** True when the source's extension is "pbs" (case-insensitive). */
    private static boolean isPbsSource(final SourceHandle sourceHandle) {
        return "pbs".equalsIgnoreCase(sourceHandle.getExtension());
    }

    /**
     * Converts building issues and frontend diagnostics into LSP diagnostic
     * DTOs, grouped by the (normalized, absolute) path of the owning document.
     * Issues without a usable file id, or outside the file table, are skipped.
     */
    private static Map<Path, List<PrometeuLspDiagnosticDTO>> diagnosticsByDocument(
            final List<BuildingIssue> issues,
            final AnalysisSnapshot snapshot,
            final List<Diagnostic> frontendDiagnostics) {
        final Map<Path, List<PrometeuLspDiagnosticDTO>> diagnosticsByDocument = new LinkedHashMap<>();
        for (final var issue : issues) {
            if (issue.getFileId() == null || issue.getFileId() < 0) {
                continue;
            }
            if (snapshot.fileTable() == null || issue.getFileId() >= snapshot.fileTable().size()) {
                continue;
            }
            final SourceHandle sourceHandle = snapshot.fileTable().get(new FileId(issue.getFileId()));
            final Path documentPath = sourceHandle.getCanonPath().toAbsolutePath().normalize();
            diagnosticsByDocument
                    .computeIfAbsent(documentPath, ignored -> new ArrayList<>())
                    .add(new PrometeuLspDiagnosticDTO(
                            documentPath,
                            new PrometeuLspRangeDTO(
                                    safeOffset(issue.getStart()),
                                    safeEnd(issue.getStart(), issue.getEnd())),
                            issue.isError()
                                    ? PrometeuLspDiagnosticSeverityDTO.ERROR
                                    : PrometeuLspDiagnosticSeverityDTO.WARNING,
                            issue.getPhase(),
                            issue.getCode(),
                            issue.getMessage()));
        }
        for (final Diagnostic diagnostic : frontendDiagnostics) {
            // Frontend diagnostics without a span (or spanning no file) cannot
            // be attached to a document.
            if (snapshot.fileTable() == null || diagnostic.getSpan() == null || diagnostic.getSpan().getFileId().isNone()) {
                continue;
            }
            final SourceHandle sourceHandle = snapshot.fileTable().get(diagnostic.getSpan().getFileId());
            final Path documentPath = sourceHandle.getCanonPath().toAbsolutePath().normalize();
            diagnosticsByDocument
                    .computeIfAbsent(documentPath, ignored -> new ArrayList<>())
                    .add(new PrometeuLspDiagnosticDTO(
                            documentPath,
                            new PrometeuLspRangeDTO(
                                    (int) diagnostic.getSpan().getStart(),
                                    (int) diagnostic.getSpan().getEnd()),
                            diagnostic.getSeverity().isError()
                                    ? PrometeuLspDiagnosticSeverityDTO.ERROR
                                    : PrometeuLspDiagnosticSeverityDTO.WARNING,
                            diagnostic.getPhase().name(),
                            diagnostic.getCode(),
                            diagnostic.getMessage()));
        }
        return freezeMapOfLists(diagnosticsByDocument);
    }

    /** Null-safe, non-negative offset: null maps to 0, negatives clamp to 0. */
    private static int safeOffset(final Integer value) {
        return value == null ? 0 : Math.max(0, value);
    }

    /** End offset that is never before the (null-safe) start offset. */
    private static int safeEnd(final Integer start, final Integer end) {
        final int safeStart = safeOffset(start);
        final int safeEnd = safeOffset(end);
        return Math.max(safeStart, safeEnd);
    }

    /**
     * Resolves the identifier token at the request offset against the name
     * index. Matches in the same document win; otherwise all matches across
     * the workspace are returned. Non-identifier positions yield no targets.
     */
    private static List<PrometeuLspDefinitionTargetDTO> resolveDefinitionTargets(
            final SemanticSession session,
            final PrometeuLspDefinitionRequest request) {
        final Path documentPath = normalize(request.documentPath());
        final List<PbsToken> tokens = session.tokensByDocument().getOrDefault(documentPath, List.of());
        final PbsToken activeToken = tokenAt(tokens, request.offset());
        if (activeToken == null || activeToken.kind() != PbsTokenKind.IDENTIFIER) {
            return List.of();
        }
        final String lexeme = activeToken.lexeme();
        final List<PrometeuLspSymbolDTO> sameDocumentMatches = session.symbolsByName().getOrDefault(lexeme, List.of()).stream()
                .filter(symbol -> symbol.documentPath().equals(documentPath))
                .toList();
        final List<PrometeuLspSymbolDTO> effectiveMatches = sameDocumentMatches.isEmpty()
                ? session.symbolsByName().getOrDefault(lexeme, List.of())
                : sameDocumentMatches;
        return effectiveMatches.stream()
                .map(symbol -> new PrometeuLspDefinitionTargetDTO(symbol.name(), symbol.documentPath(), symbol.range()))
                .toList();
    }

    /**
     * Linear scan for the token whose half-open [start, end) span contains
     * the offset; null when none does.
     */
    private static PbsToken tokenAt(final List<PbsToken> tokens, final int offset) {
        for (final PbsToken token : tokens) {
            if (token.start() <= offset && offset < token.end()) {
                return token;
            }
        }
        return null;
    }

    /**
     * Canonicalizes a path: absolute + normalized, upgraded to the real path
     * (symlinks resolved) when the file exists. I/O failures fall back to the
     * normalized form so lookups stay best-effort.
     */
    private static Path normalize(final Path path) {
        final Path normalized = Objects.requireNonNull(path, "path").toAbsolutePath().normalize();
        try {
            return Files.exists(normalized) ? normalized.toRealPath() : normalized;
        } catch (IOException ignored) {
            return normalized;
        }
    }

    /** Deeply immutable copy of a map whose values are lists. */
    private static <T> Map<Path, List<T>> freezeMapOfLists(final Map<Path, List<T>> mutable) {
        final Map<Path, List<T>> frozen = new LinkedHashMap<>();
        for (final var entry : mutable.entrySet()) {
            frozen.put(entry.getKey(), List.copyOf(entry.getValue()));
        }
        return Map.copyOf(frozen);
    }

    /** Immutable per-request view of everything the read phase computed. */
    private record SemanticSession(
            Path requestedDocumentPath,
            Map<Path, List<PrometeuLspDiagnosticDTO>> diagnosticsByDocument,
            Map<Path, List<PrometeuLspSymbolDTO>> documentSymbolsByDocument,
            List<PrometeuLspSymbolDTO> workspaceSymbols,
            Map<String, List<PrometeuLspSymbolDTO>> symbolsByName,
            Map<Path, List<PbsToken>> tokensByDocument) {
    }

    /** Pipeline output (analysis snapshot) plus the frontend diagnostics. */
    private record AnalysisRuntimeSnapshot(
            AnalysisSnapshot analysisSnapshot,
            List<Diagnostic> frontendDiagnostics) {
    }

    /**
     * Mutable accumulator for symbol and token indexes, built one document at
     * a time via {@link #index}; getters return frozen copies.
     */
    private static final class SemanticIndex {
        private final Map<Path, List<PrometeuLspSymbolDTO>> documentSymbolsByDocument = new LinkedHashMap<>();
        private final List<PrometeuLspSymbolDTO> workspaceSymbols = new ArrayList<>();
        private final Map<String, List<PrometeuLspSymbolDTO>> symbolsByName = new LinkedHashMap<>();
        private final Map<Path, List<PbsToken>> tokensByDocument = new LinkedHashMap<>();

        /**
         * Indexes one parsed document: records its tokens, converts each
         * top-level declaration to a symbol, and registers the symbol (and
         * its direct children) under their names.
         */
        void index(
                final Path documentPath,
                final PbsAst.File ast,
                final List<PbsToken> tokens) {
            final Path normalizedDocumentPath = normalize(documentPath);
            tokensByDocument.put(normalizedDocumentPath, List.copyOf(tokens));
            final List<PrometeuLspSymbolDTO> documentSymbols = new ArrayList<>();
            for (final PbsAst.TopDecl topDecl : ast.topDecls()) {
                final PrometeuLspSymbolDTO symbol = symbolForTopDecl(normalizedDocumentPath, topDecl);
                if (symbol == null) {
                    continue;
                }
                documentSymbols.add(symbol);
                workspaceSymbols.add(symbol);
                symbolsByName.computeIfAbsent(symbol.name(), ignored -> new ArrayList<>()).add(symbol);
                // Only direct children (methods, ctors, signatures) are
                // name-indexed; deeper nesting is not produced here.
                for (final PrometeuLspSymbolDTO child : symbol.children()) {
                    symbolsByName.computeIfAbsent(child.name(), ignored -> new ArrayList<>()).add(child);
                }
            }
            documentSymbolsByDocument.put(normalizedDocumentPath, List.copyOf(documentSymbols));
        }

        /**
         * Maps a PBS top-level declaration to a symbol DTO, or null for
         * unrecognized kinds.
         * NOTE(review): this instanceof chain could become an exhaustive
         * pattern switch if PbsAst.TopDecl is sealed — confirm.
         */
        private PrometeuLspSymbolDTO symbolForTopDecl(
                final Path documentPath,
                final PbsAst.TopDecl topDecl) {
            if (topDecl instanceof PbsAst.FunctionDecl functionDecl) {
                return symbol(documentPath, functionDecl.name(), PrometeuLspSymbolKindDTO.FUNCTION, functionDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.StructDecl structDecl) {
                return symbol(documentPath, structDecl.name(), PrometeuLspSymbolKindDTO.STRUCT, structDecl.span(), structChildren(documentPath, structDecl));
            }
            if (topDecl instanceof PbsAst.ContractDecl contractDecl) {
                return symbol(documentPath, contractDecl.name(), PrometeuLspSymbolKindDTO.CONTRACT, contractDecl.span(), signatureChildren(documentPath, contractDecl.signatures().asList()));
            }
            if (topDecl instanceof PbsAst.HostDecl hostDecl) {
                return symbol(documentPath, hostDecl.name(), PrometeuLspSymbolKindDTO.HOST, hostDecl.span(), signatureChildren(documentPath, hostDecl.signatures().asList()));
            }
            if (topDecl instanceof PbsAst.BuiltinTypeDecl builtinTypeDecl) {
                return symbol(documentPath, builtinTypeDecl.name(), PrometeuLspSymbolKindDTO.BUILTIN_TYPE, builtinTypeDecl.span(), signatureChildren(documentPath, builtinTypeDecl.signatures().asList()));
            }
            if (topDecl instanceof PbsAst.ServiceDecl serviceDecl) {
                return symbol(documentPath, serviceDecl.name(), PrometeuLspSymbolKindDTO.SERVICE, serviceDecl.span(), functionChildren(documentPath, serviceDecl.methods().asList()));
            }
            if (topDecl instanceof PbsAst.ErrorDecl errorDecl) {
                return symbol(documentPath, errorDecl.name(), PrometeuLspSymbolKindDTO.ERROR, errorDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.EnumDecl enumDecl) {
                return symbol(documentPath, enumDecl.name(), PrometeuLspSymbolKindDTO.ENUM, enumDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.CallbackDecl callbackDecl) {
                return symbol(documentPath, callbackDecl.name(), PrometeuLspSymbolKindDTO.CALLBACK, callbackDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.GlobalDecl globalDecl) {
                return symbol(documentPath, globalDecl.name(), PrometeuLspSymbolKindDTO.GLOBAL, globalDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.ConstDecl constDecl) {
                return symbol(documentPath, constDecl.name(), PrometeuLspSymbolKindDTO.CONST, constDecl.span(), List.of());
            }
            if (topDecl instanceof PbsAst.ImplementsDecl implementsDecl) {
                return symbol(documentPath, implementsDecl.binderName(), PrometeuLspSymbolKindDTO.IMPLEMENTS, implementsDecl.span(), functionChildren(documentPath, implementsDecl.methods().asList()));
            }
            return null;
        }

        /** Struct children: its methods plus one CONSTRUCTOR symbol per ctor. */
        private List<PrometeuLspSymbolDTO> structChildren(
                final Path documentPath,
                final PbsAst.StructDecl structDecl) {
            final List<PrometeuLspSymbolDTO> children = new ArrayList<>();
            children.addAll(functionChildren(documentPath, structDecl.methods().asList()));
            for (final PbsAst.CtorDecl ctorDecl : structDecl.ctors()) {
                children.add(symbol(documentPath, ctorDecl.name(), PrometeuLspSymbolKindDTO.CONSTRUCTOR, ctorDecl.span(), List.of()));
            }
            return List.copyOf(children);
        }

        /** One childless METHOD symbol per function declaration. */
        private List<PrometeuLspSymbolDTO> functionChildren(
                final Path documentPath,
                final List<PbsAst.FunctionDecl> functions) {
            final List<PrometeuLspSymbolDTO> children = new ArrayList<>();
            for (final PbsAst.FunctionDecl functionDecl : functions) {
                children.add(symbol(documentPath, functionDecl.name(), PrometeuLspSymbolKindDTO.METHOD, functionDecl.span(), List.of()));
            }
            return List.copyOf(children);
        }

        /** One childless METHOD symbol per function signature (contracts/hosts). */
        private List<PrometeuLspSymbolDTO> signatureChildren(
                final Path documentPath,
                final List<PbsAst.FunctionSignature> signatures) {
            final List<PrometeuLspSymbolDTO> children = new ArrayList<>();
            for (final PbsAst.FunctionSignature signature : signatures) {
                children.add(symbol(documentPath, signature.name(), PrometeuLspSymbolKindDTO.METHOD, signature.span(), List.of()));
            }
            return List.copyOf(children);
        }

        /** Builds a symbol DTO, converting the AST span to an offset range. */
        private PrometeuLspSymbolDTO symbol(
                final Path documentPath,
                final String name,
                final PrometeuLspSymbolKindDTO kind,
                final Span span,
                final List<PrometeuLspSymbolDTO> children) {
            return new PrometeuLspSymbolDTO(
                    name,
                    kind,
                    documentPath,
                    new PrometeuLspRangeDTO((int) span.getStart(), (int) span.getEnd()),
                    children);
        }

        Map<Path, List<PrometeuLspSymbolDTO>> documentSymbolsByDocument() {
            return Map.copyOf(documentSymbolsByDocument);
        }

        List<PrometeuLspSymbolDTO> workspaceSymbols() {
            return List.copyOf(workspaceSymbols);
        }

        Map<String, List<PrometeuLspSymbolDTO>> symbolsByName() {
            final Map<String, List<PrometeuLspSymbolDTO>> frozen = new LinkedHashMap<>();
            for (final var entry : symbolsByName.entrySet()) {
                frozen.put(entry.getKey(), List.copyOf(entry.getValue()));
            }
            return Map.copyOf(frozen);
        }

        Map<Path, List<PbsToken>> tokensByDocument() {
            return Map.copyOf(tokensByDocument);
        }
    }
}

View File

@ -2,6 +2,10 @@ package p.lsp.v1.internal;
import p.lsp.PrometeuLspProjectContext;
import p.lsp.PrometeuLspService;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.lsp.messages.PrometeuLspDefinitionResult;
import p.lsp.dtos.PrometeuLspSessionStateDTO;
import p.studio.vfs.ProjectDocumentVfs;
@ -35,4 +39,14 @@ public final class PrometeuLspV1Service implements PrometeuLspService {
true,
List.of("diagnostics", "symbols", "definition", "highlight"));
}
@Override
public PrometeuLspAnalyzeDocumentResult analyzeDocument(final PrometeuLspAnalyzeDocumentRequest request) {
    // Delegates the read-only analysis (diagnostics + symbols) to the
    // stateless semantic-read phase, scoped to this service's project
    // context and document VFS.
    return PrometeuLspSemanticReadPhase.analyze(projectContext, projectDocumentVfs, request);
}
@Override
public PrometeuLspDefinitionResult definition(final PrometeuLspDefinitionRequest request) {
    // Delegates go-to-definition resolution to the stateless semantic-read
    // phase, scoped to this service's project context and document VFS.
    return PrometeuLspSemanticReadPhase.definition(projectContext, projectDocumentVfs, request);
}
}

View File

@ -0,0 +1,52 @@
package p.lsp.v1.internal;
import p.studio.compiler.utilities.SourceProvider;
import p.studio.compiler.utilities.SourceProviderFactory;
import p.studio.vfs.ProjectDocumentVfs;
import p.studio.vfs.VfsDocumentOpenResult;
import p.studio.vfs.VfsTextDocument;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
/**
 * Source provider factory that overlays a single opened document with its
 * VFS content: reads of the opened document go through the project VFS (so
 * unsaved editor text is seen), while every other path is read straight from
 * the filesystem.
 */
final class PrometeuLspVfsOverlaySourceProviderFactory implements SourceProviderFactory {
    private final ProjectDocumentVfs projectDocumentVfs;
    // Normalized path of the one document overlaid by the VFS; null when
    // no document is overlaid.
    private final Path openedDocumentPath;

    PrometeuLspVfsOverlaySourceProviderFactory(
            final ProjectDocumentVfs projectDocumentVfs,
            final Path openedDocumentPath) {
        this.projectDocumentVfs = Objects.requireNonNull(projectDocumentVfs, "projectDocumentVfs");
        this.openedDocumentPath = openedDocumentPath == null ? null : normalize(openedDocumentPath);
    }

    @Override
    public SourceProvider create(final Path path) {
        // Normalize eagerly so the overlay comparison in read() is stable.
        final Path target = normalize(path);
        return () -> read(target);
    }

    /**
     * Reads the bytes for a path: VFS text content (UTF-8) for the overlaid
     * document, filesystem bytes otherwise. Non-text VFS results fall through
     * to the filesystem.
     */
    private byte[] read(final Path path) throws IOException {
        if (path.equals(openedDocumentPath)) {
            final VfsDocumentOpenResult result = projectDocumentVfs.openDocument(path);
            if (result instanceof VfsTextDocument textDocument) {
                return textDocument.content().getBytes(StandardCharsets.UTF_8);
            }
        }
        return Files.readAllBytes(path);
    }

    /**
     * Absolute + normalized form, upgraded to the real path when the file
     * exists; I/O failures fall back to the normalized form.
     */
    private Path normalize(final Path path) {
        final Path candidate = path.toAbsolutePath().normalize();
        if (!Files.exists(candidate)) {
            return candidate;
        }
        try {
            return candidate.toRealPath();
        } catch (IOException ignored) {
            return candidate;
        }
    }
}

View File

@ -0,0 +1,183 @@
package p.lsp.v1.internal;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import p.lsp.PrometeuLspProjectContext;
import p.lsp.PrometeuLspService;
import p.lsp.dtos.PrometeuLspDefinitionTargetDTO;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.studio.vfs.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@link PrometeuLspV1Service}'s semantic read phase: analyzeDocument
 * must honor the VFS overlay for the requested document while resolving closed
 * files from the filesystem, and definition must target the overlay-resolved
 * symbol's declaring document.
 */
final class PrometeuLspV1ServiceTest {

    // Fresh per-test project root, managed by JUnit.
    @TempDir
    Path tempDir;

    @Test
    void analyzeDocumentUsesVfsOverlayForRequestedDocumentAndFilesystemFallbackForClosedFiles() throws Exception {
        final Path projectRoot = createProject();
        final Path mainFile = projectRoot.resolve("src/main.pbs");
        final Path helperFile = projectRoot.resolve("src/helper.pbs");
        // The on-disk main.pbs is intentionally broken; the overlay below must win.
        Files.writeString(mainFile, "fn broken( -> void {}\n");
        Files.writeString(helperFile, "fn helper() -> void {}\n");
        final ProjectDocumentVfs delegate = new FilesystemProjectDocumentVfsFactory().open(projectContext(projectRoot));
        final String overlaySource = """
                fn helper_call() -> void
                {
                helper();
                }
                """;
        final PrometeuLspService service = new PrometeuLspV1Service(
                new PrometeuLspProjectContext("Example", "pbs", projectRoot),
                new OverlayProjectDocumentVfs(delegate, mainFile, overlaySource));
        final var analysis = service.analyzeDocument(new PrometeuLspAnalyzeDocumentRequest(mainFile));
        // helper_call exists only in the overlay, so seeing it proves the overlay was used.
        assertTrue(
                analysis.documentSymbols().stream().anyMatch(symbol -> symbol.name().equals("helper_call")),
                analysis.toString());
        // helper exists only on disk, so seeing it proves the filesystem fallback works.
        assertTrue(
                analysis.workspaceSymbols().stream().anyMatch(symbol ->
                        symbol.name().equals("helper") && symbol.documentPath().equals(normalize(helperFile))),
                analysis.toString());
        // Definition at the call site in the overlay must point back to the on-disk helper.
        final int offset = overlaySource.indexOf("helper();");
        final var definition = service.definition(new PrometeuLspDefinitionRequest(mainFile, offset));
        final List<PrometeuLspDefinitionTargetDTO> targets = definition.targets();
        assertEquals(1, targets.size());
        assertEquals(normalize(helperFile), targets.get(0).documentPath());
        assertEquals("helper", targets.get(0).name());
    }

    @Test
    void analyzeDocumentSurfacesDiagnosticsWithoutAbortingSemanticRead() throws Exception {
        final Path projectRoot = createProject();
        final Path mainFile = projectRoot.resolve("src/main.pbs");
        // Malformed source: the read phase should report diagnostics, not throw.
        Files.writeString(mainFile, """
                fn main( -> void
                {
                helper();
                }
                """);
        final ProjectDocumentVfs vfs = new FilesystemProjectDocumentVfsFactory().open(projectContext(projectRoot));
        final PrometeuLspService service = new PrometeuLspV1Service(
                new PrometeuLspProjectContext("Example", "pbs", projectRoot),
                vfs);
        final var analysis = service.analyzeDocument(new PrometeuLspAnalyzeDocumentRequest(mainFile));
        assertFalse(analysis.diagnostics().isEmpty(), analysis.toString());
        // Every diagnostic must be attributed to the analyzed document.
        assertTrue(analysis.diagnostics().stream().allMatch(diagnostic ->
                diagnostic.documentPath().equals(normalize(mainFile))));
    }

    /** Lays out a minimal project: manifest, src/, and a barrel declaring helper(). */
    private Path createProject() throws Exception {
        final Path src = Files.createDirectories(tempDir.resolve("src"));
        Files.writeString(tempDir.resolve("prometeu.json"), """
                {
                "name": "Example",
                "version": "1.0.0",
                "language": "pbs",
                "stdlib": "1",
                "dependencies": []
                }
                """);
        Files.writeString(src.resolve("mod.barrel"), "pub fn helper() -> void;\n");
        return tempDir;
    }

    private VfsProjectContext projectContext(final Path projectRoot) {
        return new VfsProjectContext("Example", "pbs", projectRoot);
    }

    /** Canonicalizes a path for comparison against service-reported document paths. */
    private static Path normalize(final Path path) {
        try {
            return path.toAbsolutePath().normalize().toRealPath();
        } catch (Exception exception) {
            throw new IllegalStateException(exception);
        }
    }

    /**
     * Decorator over a real {@link ProjectDocumentVfs} that swaps in in-memory
     * content for a single path, simulating an editor-session overlay.
     */
    private static final class OverlayProjectDocumentVfs implements ProjectDocumentVfs {
        private final ProjectDocumentVfs delegate;
        private final Path overlayPath;
        private final String overlayContent;

        private OverlayProjectDocumentVfs(
                final ProjectDocumentVfs delegate,
                final Path overlayPath,
                final String overlayContent) {
            this.delegate = delegate;
            this.overlayPath = normalize(overlayPath);
            this.overlayContent = overlayContent;
        }

        @Override
        public VfsProjectContext projectContext() {
            return delegate.projectContext();
        }

        @Override
        public VfsProjectSnapshot snapshot() {
            return delegate.snapshot();
        }

        @Override
        public VfsProjectSnapshot refresh() {
            return delegate.refresh();
        }

        @Override
        public VfsProjectSnapshot refresh(final VfsRefreshRequest request) {
            return delegate.refresh(request);
        }

        @Override
        public VfsDocumentOpenResult openDocument(final Path path) {
            final Path normalizedPath = normalize(path);
            if (!overlayPath.equals(normalizedPath)) {
                return delegate.openDocument(normalizedPath);
            }
            // Serve the overlay as a read-only text document with fixed metadata.
            final VfsDocumentAccessContext accessContext = new VfsDocumentAccessContext(
                    normalizedPath,
                    "pbs",
                    true,
                    VfsDocumentAccessMode.READ_ONLY,
                    Map.of());
            return new VfsTextDocument(
                    normalizedPath,
                    normalizedPath.getFileName().toString(),
                    "pbs",
                    overlayContent,
                    "LF",
                    false,
                    accessContext);
        }

        @Override
        public void close() {
            delegate.close();
        }

        /** Best-effort canonicalization mirroring the production normalization. */
        private static Path normalize(final Path path) {
            final Path normalized = path.toAbsolutePath().normalize();
            try {
                return Files.exists(normalized) ? normalized.toRealPath() : normalized;
            } catch (Exception ignored) {
                return normalized;
            }
        }
    }
}

View File

@ -78,6 +78,10 @@ public enum I18n {
CODE_EDITOR_NAVIGATOR_DETAIL("codeEditor.navigator.detail"),
CODE_EDITOR_OUTLINE_TITLE("codeEditor.outline.title"),
CODE_EDITOR_OUTLINE_PLACEHOLDER("codeEditor.outline.placeholder"),
CODE_EDITOR_OUTLINE_DIAGNOSTICS("codeEditor.outline.diagnostics"),
CODE_EDITOR_OUTLINE_SYMBOLS("codeEditor.outline.symbols"),
CODE_EDITOR_OUTLINE_EMPTY_DIAGNOSTICS("codeEditor.outline.emptyDiagnostics"),
CODE_EDITOR_OUTLINE_EMPTY_SYMBOLS("codeEditor.outline.emptySymbols"),
CODE_EDITOR_HELPER_TITLE("codeEditor.helper.title"),
CODE_EDITOR_HELPER_PLACEHOLDER("codeEditor.helper.placeholder"),
CODE_EDITOR_TABS_PLACEHOLDER("codeEditor.tabs.placeholder"),

View File

@ -28,7 +28,10 @@ public final class MainView extends BorderPane {
setTop(new StudioShellTopBarControl(menuBar));
host.register(new AssetWorkspace(projectReference));
host.register(new EditorWorkspace(projectReference, projectSession.projectDocumentVfs()));
host.register(new EditorWorkspace(
projectReference,
projectSession.projectDocumentVfs(),
projectSession.prometeuLspService()));
// host.register(new PlaceholderWorkspace(WorkspaceId.DEBUG, I18n.WORKSPACE_DEBUG, "Debug"));
host.register(new ShipperWorkspace(projectReference));

View File

@ -2,15 +2,24 @@ package p.studio.workspaces.editor;
import javafx.geometry.Insets;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.layout.VBox;
import p.lsp.dtos.PrometeuLspDiagnosticDTO;
import p.lsp.dtos.PrometeuLspSymbolDTO;
import p.studio.Container;
import p.studio.controls.WorkspaceDockPane;
import p.studio.utilities.i18n.I18n;
import java.nio.file.Path;
import java.util.List;
public final class EditorOutlinePanel extends WorkspaceDockPane {
private static final double COLLAPSED_HEIGHT = 34.0;
private static final double MINIMUM_EXPANDED_HEIGHT = 120.0;
private static final double DEFAULT_HEIGHT = 180.0;
private final Label summary = new Label();
private final VBox diagnosticsBox = new VBox(6);
private final VBox symbolsBox = new VBox(6);
public EditorOutlinePanel() {
super(
@ -21,14 +30,107 @@ public final class EditorOutlinePanel extends WorkspaceDockPane {
true,
"editor-workspace-outline-panel");
final var placeholder = new Label();
placeholder.textProperty().bind(Container.i18n().bind(I18n.CODE_EDITOR_OUTLINE_PLACEHOLDER));
placeholder.getStyleClass().add("editor-workspace-placeholder");
placeholder.setWrapText(true);
summary.getStyleClass().addAll("editor-workspace-placeholder", "editor-workspace-outline-summary");
summary.setWrapText(true);
final var content = new VBox(8, placeholder);
final var diagnosticsTitle = sectionTitle(I18n.CODE_EDITOR_OUTLINE_DIAGNOSTICS);
final var symbolsTitle = sectionTitle(I18n.CODE_EDITOR_OUTLINE_SYMBOLS);
diagnosticsBox.getStyleClass().add("editor-workspace-outline-list");
symbolsBox.getStyleClass().add("editor-workspace-outline-list");
final var content = new VBox(10,
summary,
diagnosticsTitle,
diagnosticsBox,
symbolsTitle,
symbolsBox);
content.getStyleClass().add("editor-workspace-panel-content");
content.setPadding(new Insets(10, 16, 14, 16));
setDockContent(content);
final var scrollPane = new ScrollPane(content);
scrollPane.setFitToWidth(true);
scrollPane.getStyleClass().add("editor-workspace-outline-scroll");
setDockContent(scrollPane);
showPlaceholder();
}
/**
 * Resets the panel to its idle state: i18n-bound placeholder summary and
 * empty-state rows in both the diagnostics and symbols sections.
 */
public void showPlaceholder() {
    // Unbind first: showSemanticReadResult leaves the summary with literal text.
    summary.textProperty().unbind();
    summary.textProperty().bind(Container.i18n().bind(I18n.CODE_EDITOR_OUTLINE_PLACEHOLDER));
    diagnosticsBox.getChildren().setAll(placeholderLabel(I18n.CODE_EDITOR_OUTLINE_EMPTY_DIAGNOSTICS));
    symbolsBox.getChildren().setAll(placeholderLabel(I18n.CODE_EDITOR_OUTLINE_EMPTY_SYMBOLS));
}
/**
 * Displays a read-only semantic snapshot for the given document: a summary
 * line naming the file, its diagnostics, and its symbol tree.
 */
public void showSemanticReadResult(
        final Path documentPath,
        final List<PrometeuLspDiagnosticDTO> diagnostics,
        final List<PrometeuLspSymbolDTO> symbols) {
    // Drop the i18n binding before setText — setting a bound property throws.
    summary.textProperty().unbind();
    summary.setText(documentPath.getFileName() + " • semantic read-only");
    rebuildDiagnostics(diagnostics);
    rebuildSymbols(symbols);
}
/**
 * Repopulates the diagnostics section, one wrapped row per diagnostic styled by
 * severity; shows the empty placeholder when there is nothing to report.
 */
private void rebuildDiagnostics(final List<PrometeuLspDiagnosticDTO> diagnostics) {
    diagnosticsBox.getChildren().clear();
    if (diagnostics.isEmpty()) {
        diagnosticsBox.getChildren().add(placeholderLabel(I18n.CODE_EDITOR_OUTLINE_EMPTY_DIAGNOSTICS));
        return;
    }
    for (final PrometeuLspDiagnosticDTO entry : diagnostics) {
        final boolean isError = entry.severity().name().equals("ERROR");
        final String severityStyle = isError
                ? "editor-workspace-outline-diagnostic-error"
                : "editor-workspace-outline-diagnostic-warning";
        final Label row = new Label(formatDiagnostic(entry));
        row.setWrapText(true);
        row.getStyleClass().addAll("editor-workspace-outline-item", severityStyle);
        diagnosticsBox.getChildren().add(row);
    }
}
/**
 * Repopulates the symbols section with a depth-first rendering of the symbol
 * tree; shows the empty placeholder when no symbols are available.
 */
private void rebuildSymbols(final List<PrometeuLspSymbolDTO> symbols) {
    symbolsBox.getChildren().clear();
    if (symbols.isEmpty()) {
        symbolsBox.getChildren().add(placeholderLabel(I18n.CODE_EDITOR_OUTLINE_EMPTY_SYMBOLS));
        return;
    }
    symbols.forEach(root -> appendSymbol(root, 0));
}
/**
 * Appends one symbol row, indented 12px per nesting level, then recurses into
 * its children depth-first so the flat VBox reads as a tree.
 */
private void appendSymbol(final PrometeuLspSymbolDTO symbol, final int depth) {
    // Bug fix: the name and kind were joined with an empty string ("namekind");
    // use the same " • " separator the summary label uses.
    final var label = new Label(symbol.name() + " • " + symbol.kind().name().toLowerCase());
    label.setWrapText(true);
    label.setPadding(new Insets(0, 0, 0, depth * 12));
    label.getStyleClass().add("editor-workspace-outline-item");
    symbolsBox.getChildren().add(label);
    for (final PrometeuLspSymbolDTO child : symbol.children()) {
        appendSymbol(child, depth + 1);
    }
}
/** Builds an i18n-bound section heading label for the outline panel. */
private Label sectionTitle(final I18n key) {
    final var label = new Label();
    label.textProperty().bind(Container.i18n().bind(key));
    label.getStyleClass().add("editor-workspace-outline-section-title");
    return label;
}
/** Builds an i18n-bound, wrapping empty-state row for a section list. */
private Label placeholderLabel(final I18n key) {
    final var label = new Label();
    label.textProperty().bind(Container.i18n().bind(key));
    label.setWrapText(true);
    label.getStyleClass().addAll("editor-workspace-placeholder", "editor-workspace-outline-item");
    return label;
}
/** Renders a diagnostic as "SEVERITY [start,end) message" with half-open offsets. */
private String formatDiagnostic(final PrometeuLspDiagnosticDTO diagnostic) {
    final var range = diagnostic.range();
    return String.format(
            "%s [%d,%d) %s",
            diagnostic.severity().name(),
            range.startOffset(),
            range.endOffset(),
            diagnostic.message());
}
}

View File

@ -12,6 +12,9 @@ import javafx.scene.layout.Region;
import javafx.scene.layout.VBox;
import org.fxmisc.richtext.CodeArea;
import org.fxmisc.richtext.LineNumberFactory;
import p.lsp.PrometeuLspService;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.studio.projects.ProjectReference;
import p.studio.utilities.i18n.I18n;
import p.studio.vfs.ProjectDocumentVfs;
@ -38,6 +41,7 @@ public final class EditorWorkspace extends Workspace {
private final EditorStatusBar statusBar = new EditorStatusBar();
private final EditorTabStrip tabStrip = new EditorTabStrip();
private final EditorDocumentPresentationRegistry presentationRegistry = new EditorDocumentPresentationRegistry();
private final PrometeuLspService prometeuLspService;
private final ProjectDocumentVfs projectDocumentVfs;
private final EditorOpenFileSession openFileSession = new EditorOpenFileSession();
private final List<String> activePresentationStylesheets = new ArrayList<>();
@ -45,9 +49,11 @@ public final class EditorWorkspace extends Workspace {
public EditorWorkspace(
final ProjectReference projectReference,
final ProjectDocumentVfs projectDocumentVfs) {
final ProjectDocumentVfs projectDocumentVfs,
final PrometeuLspService prometeuLspService) {
super(projectReference);
this.projectDocumentVfs = Objects.requireNonNull(projectDocumentVfs, "projectDocumentVfs");
this.prometeuLspService = Objects.requireNonNull(prometeuLspService, "prometeuLspService");
root.getStyleClass().add("editor-workspace");
codeArea.setParagraphGraphicFactory(LineNumberFactory.get(codeArea));
codeArea.setEditable(false);
@ -116,6 +122,7 @@ public final class EditorWorkspace extends Workspace {
applyPresentationStylesheets(presentation);
EditorDocumentPresentationStyles.applyToCodeArea(codeArea, presentation);
statusBar.showPlaceholder(presentation);
outlinePanel.showPlaceholder();
return;
}
@ -133,6 +140,7 @@ public final class EditorWorkspace extends Workspace {
EditorDocumentPresentationStyles.applyToCodeArea(codeArea, presentation);
refreshCommandSurfaces(fileBuffer);
statusBar.showFile(projectReference, fileBuffer, presentation);
refreshSemanticOutline(fileBuffer);
}
private void revealActiveFileInNavigator() {
@ -172,6 +180,7 @@ public final class EditorWorkspace extends Workspace {
saveAllButton.setDisable(true);
readOnlyWarning.setVisible(false);
readOnlyWarning.setManaged(false);
outlinePanel.showPlaceholder();
}
private void applyPresentationStylesheets(final EditorDocumentPresentation presentation) {
@ -307,4 +316,17 @@ public final class EditorWorkspace extends Workspace {
textDocument.accessContext().accessMode(),
textDocument.dirty());
}
/**
 * Pushes the active buffer's diagnostics and document symbols into the outline
 * panel; non-frontend documents fall back to the placeholder state.
 */
private void refreshSemanticOutline(final EditorOpenFileBuffer fileBuffer) {
    if (fileBuffer.frontendDocument()) {
        final var documentPath = fileBuffer.path();
        final var analysis = prometeuLspService.analyzeDocument(
                new PrometeuLspAnalyzeDocumentRequest(documentPath));
        outlinePanel.showSemanticReadResult(
                documentPath,
                analysis.diagnostics(),
                analysis.documentSymbols());
    } else {
        outlinePanel.showPlaceholder();
    }
}
}

View File

@ -68,7 +68,11 @@ codeEditor.navigator.revealActive=Reveal active file
codeEditor.navigator.placeholder=Project navigation lands in the next implementation slice.
codeEditor.navigator.detail=This first shell reserves the full navigator surface, its refresh action, and the left-column composition without wiring project-tree data yet.
codeEditor.outline.title=Outline
codeEditor.outline.placeholder=Outline is reserved for a future semantic-aware wave.
codeEditor.outline.placeholder=Open a frontend document to inspect read-only diagnostics and symbols.
codeEditor.outline.diagnostics=Diagnostics
codeEditor.outline.symbols=Symbols
codeEditor.outline.emptyDiagnostics=No diagnostics for the active frontend document.
codeEditor.outline.emptySymbols=No semantic symbols are currently available for the active frontend document.
codeEditor.helper.title=Editor Helper
codeEditor.helper.placeholder=This region is intentionally passive in the first read-only wave.
codeEditor.tabs.placeholder=no-file-open.txt

View File

@ -510,6 +510,43 @@
-fx-min-height: 0;
}
/* Outline panel scroll container: transparent so the dock background shows through. */
.editor-workspace-outline-scroll {
-fx-background-color: transparent;
-fx-background-insets: 0;
}
.editor-workspace-outline-scroll > .viewport {
-fx-background-color: transparent;
}
/* Summary line naming the active document. */
.editor-workspace-outline-summary {
-fx-text-fill: #dce6f0;
}
/* Small, bold section headings ("Diagnostics" / "Symbols"). */
.editor-workspace-outline-section-title {
-fx-font-size: 11px;
-fx-font-weight: 700;
-fx-text-fill: #8fb1d2;
-fx-padding: 4 0 0 0;
}
.editor-workspace-outline-list {
-fx-spacing: 6;
}
/* Default row styling; severity classes below override the text color. */
.editor-workspace-outline-item {
-fx-text-fill: #d7e2ec;
-fx-font-size: 12px;
}
.editor-workspace-outline-diagnostic-error {
-fx-text-fill: #ff9a9a;
}
.editor-workspace-outline-diagnostic-warning {
-fx-text-fill: #f6d78f;
}
.editor-workspace-tab-strip {
-fx-padding: 8 12 8 12;
-fx-background-color: #1b1f25;

View File

@ -5,6 +5,10 @@ import p.lsp.PrometeuLspProjectContext;
import p.lsp.PrometeuLspService;
import p.lsp.PrometeuLspServiceFactory;
import p.lsp.dtos.PrometeuLspSessionStateDTO;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.lsp.messages.PrometeuLspDefinitionResult;
import p.studio.projects.ProjectReference;
import p.studio.vfs.ProjectDocumentVfs;
import p.studio.vfs.ProjectDocumentVfsFactory;
@ -112,5 +116,15 @@ final class StudioProjectSessionFactoryTest {
public PrometeuLspSessionStateDTO snapshot() {
throw new UnsupportedOperationException();
}
// Test double: semantic-read entrypoints are not exercised by this fixture.
@Override
public PrometeuLspAnalyzeDocumentResult analyzeDocument(final PrometeuLspAnalyzeDocumentRequest request) {
    throw new UnsupportedOperationException();
}
// Test double: definition lookups are not exercised by this fixture.
@Override
public PrometeuLspDefinitionResult definition(final PrometeuLspDefinitionRequest request) {
    throw new UnsupportedOperationException();
}
}
}

View File

@ -4,6 +4,10 @@ import org.junit.jupiter.api.Test;
import p.lsp.PrometeuLspProjectContext;
import p.lsp.PrometeuLspService;
import p.lsp.dtos.PrometeuLspSessionStateDTO;
import p.lsp.messages.PrometeuLspAnalyzeDocumentRequest;
import p.lsp.messages.PrometeuLspAnalyzeDocumentResult;
import p.lsp.messages.PrometeuLspDefinitionRequest;
import p.lsp.messages.PrometeuLspDefinitionResult;
import p.studio.projects.ProjectReference;
import p.studio.vfs.ProjectDocumentVfs;
import p.studio.vfs.VfsDocumentOpenResult;
@ -90,6 +94,16 @@ final class StudioProjectSessionTest {
throw new UnsupportedOperationException();
}
@Override
public PrometeuLspAnalyzeDocumentResult analyzeDocument(final PrometeuLspAnalyzeDocumentRequest request) {
throw new UnsupportedOperationException();
}
// Test double: definition lookups are not exercised by these session tests.
@Override
public PrometeuLspDefinitionResult definition(final PrometeuLspDefinitionRequest request) {
    throw new UnsupportedOperationException();
}
@Override
public void close() {
closeCalls++;