implements PLN-0035 pbs inline type hint payload production

This commit is contained in:
bQUARKz 2026-04-03 11:35:29 +01:00
parent 3e8f53dc16
commit 09d9bb4c96
Signed by: bquarkz
SSH Key Fingerprint: SHA256:Z7dgqoglWwoK6j6u4QC87OveEq74WOhFN+gitsxtkf8
14 changed files with 559 additions and 25 deletions

View File

@ -15,4 +15,4 @@
{"type":"discussion","id":"DSC-0014","status":"done","ticket":"studio-frontend-owned-semantic-editor-presentation","title":"Definir ownership do schema visual semantico do editor por frontend","created_at":"2026-04-02","updated_at":"2026-04-02","tags":["studio","editor","frontend","presentation","semantic-highlighting","compiler","pbs"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0029","file":"discussion/lessons/DSC-0014-studio-frontend-owned-semantic-editor-presentation/LSN-0029-frontend-owned-semantic-presentation-descriptor-and-host-consumption.md","status":"done","created_at":"2026-04-02","updated_at":"2026-04-02"}]} {"type":"discussion","id":"DSC-0014","status":"done","ticket":"studio-frontend-owned-semantic-editor-presentation","title":"Definir ownership do schema visual semantico do editor por frontend","created_at":"2026-04-02","updated_at":"2026-04-02","tags":["studio","editor","frontend","presentation","semantic-highlighting","compiler","pbs"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0029","file":"discussion/lessons/DSC-0014-studio-frontend-owned-semantic-editor-presentation/LSN-0029-frontend-owned-semantic-presentation-descriptor-and-host-consumption.md","status":"done","created_at":"2026-04-02","updated_at":"2026-04-02"}]}
{"type":"discussion","id":"DSC-0015","status":"done","ticket":"pbs-service-facade-reserved-metadata","title":"SDK Service Bodies Calling Builtin/Intrinsic Proxies as Ordinary PBS Code","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["compiler","pbs","sdk","stdlib","lowering","service","intrinsic","sdk-interface"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0030","file":"discussion/lessons/DSC-0015-pbs-service-facade-reserved-metadata/LSN-0030-sdk-service-bodies-over-private-reserved-proxies.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"}]} {"type":"discussion","id":"DSC-0015","status":"done","ticket":"pbs-service-facade-reserved-metadata","title":"SDK Service Bodies Calling Builtin/Intrinsic Proxies as Ordinary PBS Code","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["compiler","pbs","sdk","stdlib","lowering","service","intrinsic","sdk-interface"],"agendas":[],"decisions":[],"plans":[],"lessons":[{"id":"LSN-0030","file":"discussion/lessons/DSC-0015-pbs-service-facade-reserved-metadata/LSN-0030-sdk-service-bodies-over-private-reserved-proxies.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"}]}
{"type":"discussion","id":"DSC-0016","status":"open","ticket":"studio-editor-scope-guides-and-brace-anchoring","title":"Scope Guides do Code Editor com ancoragem exata em braces e destaque do escopo ativo","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["studio","editor","scope-guides","braces","semantic-read","frontend-contract"],"agendas":[{"id":"AGD-0017","file":"AGD-0017-studio-editor-scope-guides-and-brace-anchoring.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"decisions":[{"id":"DEC-0014","file":"DEC-0014-studio-editor-active-scope-and-structural-anchors.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"plans":[{"id":"PLN-0030","file":"PLN-0030-studio-active-container-and-active-scope-gutter-wave-1.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"},{"id":"PLN-0031","file":"PLN-0031-studio-structural-anchor-semantic-surface-specification.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"},{"id":"PLN-0032","file":"PLN-0032-frontend-structural-anchor-payloads-and-anchor-aware-tests.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"}],"lessons":[]} {"type":"discussion","id":"DSC-0016","status":"open","ticket":"studio-editor-scope-guides-and-brace-anchoring","title":"Scope Guides do Code Editor com ancoragem exata em braces e destaque do escopo 
ativo","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["studio","editor","scope-guides","braces","semantic-read","frontend-contract"],"agendas":[{"id":"AGD-0017","file":"AGD-0017-studio-editor-scope-guides-and-brace-anchoring.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"decisions":[{"id":"DEC-0014","file":"DEC-0014-studio-editor-active-scope-and-structural-anchors.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"plans":[{"id":"PLN-0030","file":"PLN-0030-studio-active-container-and-active-scope-gutter-wave-1.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"},{"id":"PLN-0031","file":"PLN-0031-studio-structural-anchor-semantic-surface-specification.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"},{"id":"PLN-0032","file":"PLN-0032-frontend-structural-anchor-payloads-and-anchor-aware-tests.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03"}],"lessons":[]}
{"type":"discussion","id":"DSC-0017","status":"open","ticket":"studio-editor-inline-type-hints-for-let-bindings","title":"Inline Type Hints for Let Bindings in the Studio Editor","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["studio","editor","inline-hints","inlay-hints","lsp","pbs","type-inference"],"agendas":[{"id":"AGD-0018","file":"AGD-0018-studio-editor-inline-type-hints-for-let-bindings.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"decisions":[{"id":"DEC-0015","file":"DEC-0015-studio-editor-inline-type-hints-contract-and-rendering-model.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03","ref_agenda":"AGD-0018"}],"plans":[{"id":"PLN-0033","file":"PLN-0033-inline-hint-spec-and-contract-propagation.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0034","file":"PLN-0034-lsp-inline-hint-transport-contract.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0035","file":"PLN-0035-pbs-inline-type-hint-payload-production.md","status":"open","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0036","file":"PLN-0036-studio-inline-hint-rendering-and-rollout.md","status":"open","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]}],"lessons":[]} {"type":"discussion","id":"DSC-0017","status":"open","ticket":"studio-editor-inline-type-hints-for-let-bindings","title":"Inline Type Hints for Let Bindings in the Studio 
Editor","created_at":"2026-04-03","updated_at":"2026-04-03","tags":["studio","editor","inline-hints","inlay-hints","lsp","pbs","type-inference"],"agendas":[{"id":"AGD-0018","file":"AGD-0018-studio-editor-inline-type-hints-for-let-bindings.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03"}],"decisions":[{"id":"DEC-0015","file":"DEC-0015-studio-editor-inline-type-hints-contract-and-rendering-model.md","status":"accepted","created_at":"2026-04-03","updated_at":"2026-04-03","ref_agenda":"AGD-0018"}],"plans":[{"id":"PLN-0033","file":"PLN-0033-inline-hint-spec-and-contract-propagation.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0034","file":"PLN-0034-lsp-inline-hint-transport-contract.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0035","file":"PLN-0035-pbs-inline-type-hint-payload-production.md","status":"done","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]},{"id":"PLN-0036","file":"PLN-0036-studio-inline-hint-rendering-and-rollout.md","status":"open","created_at":"2026-04-03","updated_at":"2026-04-03","ref_decisions":["DEC-0015"]}],"lessons":[]}

View File

@ -0,0 +1,110 @@
---
id: PLN-0035
ticket: studio-editor-inline-type-hints-for-let-bindings
title: PBS inline type hint payload production
status: done
created: 2026-04-03
completed: 2026-04-03
tags: [compiler, pbs, inline-hints, type-inference, lsp]
---
## Objective
Produce frontend-owned inline hint payloads for PBS inferred local bindings, starting with `let` bindings whose types are omitted in source.
## Background
DEC-0015 explicitly leaves hint eligibility frontend-owned. The initial user need comes from PBS inferred `let` bindings, and PBS already computes the inferred type during flow analysis. That semantic fact now needs to be surfaced as frontend-produced hint payload rather than remaining internal-only analysis state.
## Scope
### Included
- Surface PBS-produced inline type hints for inferred `let` bindings.
- Reuse existing flow-analysis type information rather than adding parallel inference.
- Preserve valid hints under partial degradation wherever the frontend can still produce them.
- Add PBS/LSP-facing tests for hint eligibility and payload correctness.
### Excluded
- Host rendering policy.
- Non-PBS frontend hint production.
- Hint categories not chosen by PBS.
## Execution Steps
### Step 1 - Identify stable anchor and payload sources in PBS analysis
**What:** Determine the exact PBS semantic source for inferred binding type and anchor location.
**How:** Trace `let` analysis through the PBS flow-analysis pipeline and identify:
- where inferred local binding type becomes stable;
- how to render that type as display text;
- which source position should anchor the hint;
- what happens when the binding has explicit type text and therefore should not receive an inferred-type hint.
This step must avoid recomputing types outside the existing semantic pipeline.
**File(s):**
- `prometeu-compiler/frontends/prometeu-frontend-pbs/src/main/java/p/studio/compiler/pbs/semantics/`
- `prometeu-compiler/frontends/prometeu-frontend-pbs/src/main/java/p/studio/compiler/services/`
### Step 2 - Emit frontend-owned hint payloads for eligible bindings
**What:** Make PBS produce inline hint payloads for eligible inferred bindings.
**How:** Extend the PBS semantic-read production path so it emits frontend-owned hint entries for `let` bindings with omitted type syntax and stable inferred type results.
The production path must:
- suppress hints when PBS does not want one;
- avoid emitting hints for explicitly typed bindings;
- preserve valid hints even if unrelated parts of the document degrade.
**File(s):**
- `prometeu-compiler/frontends/prometeu-frontend-pbs/src/main/java/p/studio/compiler/services/`
- `prometeu-compiler/frontends/prometeu-frontend-pbs/src/main/java/p/studio/compiler/pbs/semantics/`
- any accepted frontend semantic contract surface used by `prometeu-lsp`
### Step 3 - Add PBS-focused hint production tests
**What:** Lock PBS hint policy and payload behavior with tests.
**How:** Add tests for:
- inferred `let` bindings receiving hints;
- explicit `let name: Type = ...` bindings receiving no inferred-type hint;
- payload text stability for scalar, struct, optional, result, tuple, or other supported type forms chosen by PBS;
- partial-degradation cases where unaffected bindings still produce hints.
**File(s):**
- `prometeu-compiler/frontends/prometeu-frontend-pbs/src/test/java/`
- `prometeu-lsp/prometeu-lsp-v1/src/test/java/p/studio/lsp/` where end-to-end analyze assertions are required
## Test Requirements
### Unit Tests
- PBS semantic tests for inferred binding hint eligibility.
- Type-surface formatting tests if payload display text is factored into a helper.
### Integration Tests
- LSP analyze tests verifying PBS-produced hints appear in transported results.
- Partial-degradation tests preserving unaffected hints.
### Manual Verification
- Open a PBS document with inferred `let` bindings and inspect the LSP analyze payload.
- Confirm no hints appear for explicit type annotations.
## Acceptance Criteria
- [x] PBS produces frontend-owned inline hint payloads for eligible inferred `let` bindings.
- [x] Explicitly typed bindings do not receive inferred-type hints.
- [x] Valid hints survive unrelated local degradation when PBS can still analyze those bindings.
- [x] Tests lock the frontend-owned eligibility policy.
## Dependencies
- Accepted decision `DEC-0015-studio-editor-inline-type-hints-contract-and-rendering-model.md`
- `PLN-0033-inline-hint-spec-and-contract-propagation.md`
- `PLN-0034-lsp-inline-hint-transport-contract.md`
## Risks
- If inferred-type display text is not normalized, hint output may become unstable across equivalent semantic forms.
- If hint production is attached too late in the pipeline, degraded documents may lose more valid hints than necessary.

View File

@ -343,6 +343,7 @@ public final class PbsAst {
public record LetStatement( public record LetStatement(
boolean isConst, boolean isConst,
String name, String name,
Span nameSpan,
TypeRef explicitType, TypeRef explicitType,
Expression initializer, Expression initializer,
Span span) implements Statement { Span span) implements Statement {

View File

@ -118,6 +118,7 @@ final class PbsBlockParser {
return new PbsAst.LetStatement( return new PbsAst.LetStatement(
isConst, isConst,
name.lexeme(), name.lexeme(),
context.span(name.start(), name.end()),
explicitType, explicitType,
initializer, initializer,
context.span(letToken.start(), semicolon.end())); context.span(letToken.start(), semicolon.end()));

View File

@ -5,9 +5,12 @@ import p.studio.compiler.pbs.ast.PbsAst;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.Model; import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.Model;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.Scope; import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.Scope;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.TypeView; import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.TypeView;
import p.studio.compiler.services.PbsInlineHintSurface;
import p.studio.compiler.source.diagnostics.DiagnosticSink; import p.studio.compiler.source.diagnostics.DiagnosticSink;
import p.studio.utilities.structures.ReadOnlyList; import p.studio.utilities.structures.ReadOnlyList;
import java.util.ArrayList;
final class PbsFlowBodyAnalyzer { final class PbsFlowBodyAnalyzer {
private final PbsFlowTypeOps typeOps = new PbsFlowTypeOps(); private final PbsFlowTypeOps typeOps = new PbsFlowTypeOps();
private final PbsFlowExpressionAnalyzer expressionAnalyzer = new PbsFlowExpressionAnalyzer(typeOps); private final PbsFlowExpressionAnalyzer expressionAnalyzer = new PbsFlowExpressionAnalyzer(typeOps);
@ -16,25 +19,76 @@ final class PbsFlowBodyAnalyzer {
typeOps, typeOps,
expressionAnalyzer, expressionAnalyzer,
this::analyzeBlock); this::analyzeBlock);
private final PbsFlowStatementAnalyzer statementAnalyzer = new PbsFlowStatementAnalyzer(
typeOps,
expressionAnalyzer,
assignmentAnalyzer::analyzeAssignmentStatement);
private final PbsFlowCallableBodyAnalyzer callableBodyAnalyzer = new PbsFlowCallableBodyAnalyzer( private final PbsFlowCallableBodyAnalyzer callableBodyAnalyzer = new PbsFlowCallableBodyAnalyzer(
typeOps, typeOps,
completionAnalyzer, completionAnalyzer,
this::analyzeBlock); this::analyzeBlock);
private PbsFlowStatementAnalyzer.InlineHintCollector inlineHintCollector = PbsFlowStatementAnalyzer.InlineHintCollector.noop();
public void validate( public void validate(
final PbsAst.File ast, final PbsAst.File ast,
final ReadOnlyList<PbsAst.TopDecl> supplementalTopDecls, final ReadOnlyList<PbsAst.TopDecl> supplementalTopDecls,
final FESurfaceContext feSurfaceContext, final FESurfaceContext feSurfaceContext,
final DiagnosticSink diagnostics) { final DiagnosticSink diagnostics) {
final var previousCollector = inlineHintCollector;
inlineHintCollector = PbsFlowStatementAnalyzer.InlineHintCollector.noop();
try {
final var model = Model.from(ast, supplementalTopDecls, feSurfaceContext, diagnostics); final var model = Model.from(ast, supplementalTopDecls, feSurfaceContext, diagnostics);
for (final var topDecl : ast.topDecls()) { for (final var topDecl : ast.topDecls()) {
validateTopDecl(topDecl, model, diagnostics); validateTopDecl(topDecl, model, diagnostics);
} }
} finally {
inlineHintCollector = previousCollector;
}
}
/**
 * Runs the same top-decl validation pass as {@code validate(...)}, but with a live
 * collector installed so eligible inferred {@code let} bindings produce inline hint
 * payloads as a side effect of flow analysis.
 *
 * @param ast                  the parsed PBS file to analyze
 * @param supplementalTopDecls imported/supplemental top-level declarations merged into the model
 * @param feSurfaceContext     frontend surface context used to build the semantic model
 * @param diagnostics          sink receiving any diagnostics produced during analysis
 * @return the hints gathered during the pass (possibly empty)
 */
public ReadOnlyList<PbsInlineHintSurface> collectInlineHints(
final PbsAst.File ast,
final ReadOnlyList<PbsAst.TopDecl> supplementalTopDecls,
final FESurfaceContext feSurfaceContext,
final DiagnosticSink diagnostics) {
final var collector = new InlineHintsCollector();
// Swap the active collector in, run the pass, and always restore the previous one so
// nested/subsequent validate() calls are unaffected even if analysis throws.
// NOTE(review): this mutable-field handoff assumes single-threaded use of the analyzer — TODO confirm.
final var previousCollector = inlineHintCollector;
inlineHintCollector = collector;
try {
final var model = Model.from(ast, supplementalTopDecls, feSurfaceContext, diagnostics);
for (final var topDecl : ast.topDecls()) {
validateTopDecl(topDecl, model, diagnostics);
}
} finally {
inlineHintCollector = previousCollector;
}
return collector.hints();
}
// Builds a statement analyzer bound to the CURRENT inlineHintCollector field value.
// A fresh instance per call is what lets collectInlineHints() swap collectors without
// reconfiguring shared state; presumably construction is cheap — TODO confirm if this
// shows up in profiles, since it runs once per analyzed block.
private PbsFlowStatementAnalyzer statementAnalyzer() {
return new PbsFlowStatementAnalyzer(
typeOps,
expressionAnalyzer,
assignmentAnalyzer::analyzeAssignmentStatement,
inlineHintCollector);
}
/**
 * Accumulating collector: formats each reported inferred binding type and records a
 * hint anchored at the binding's name span. Bindings whose type cannot be rendered
 * (formatter returns null) or whose name span is absent are silently skipped.
 */
private static final class InlineHintsCollector implements PbsFlowStatementAnalyzer.InlineHintCollector {
// Category tag carried on every hint produced by this collector.
private static final String TYPE_HINT_CATEGORY = "type";
private final ArrayList<PbsInlineHintSurface> hints = new ArrayList<>();
private final PbsInlineHintSurfaceFormatter formatter = new PbsInlineHintSurfaceFormatter();
@Override
public void collect(final PbsAst.LetStatement letStatement, final TypeView inferredType) {
final var label = formatter.format(inferredType);
// Suppress the hint when there is no renderable type text or no usable anchor span.
if (label == null || letStatement.nameSpan() == null || letStatement.nameSpan().isNone()) {
return;
}
hints.add(new PbsInlineHintSurface(letStatement.nameSpan(), label, TYPE_HINT_CATEGORY));
}
// Read-only view of everything collected so far, in source encounter order.
ReadOnlyList<PbsInlineHintSurface> hints() {
return ReadOnlyList.wrap(hints);
}
} }
private void validateTopDecl( private void validateTopDecl(
@ -194,6 +248,6 @@ final class PbsFlowBodyAnalyzer {
final PbsAst.Block block, final PbsAst.Block block,
final PbsFlowBodyContext context, final PbsFlowBodyContext context,
final boolean valueContext) { final boolean valueContext) {
return statementAnalyzer.analyzeBlock(block, context, valueContext); return statementAnalyzer().analyzeBlock(block, context, valueContext);
} }
} }

View File

@ -2,6 +2,7 @@ package p.studio.compiler.pbs.semantics;
import p.studio.compiler.messages.FESurfaceContext; import p.studio.compiler.messages.FESurfaceContext;
import p.studio.compiler.pbs.ast.PbsAst; import p.studio.compiler.pbs.ast.PbsAst;
import p.studio.compiler.services.PbsInlineHintSurface;
import p.studio.compiler.source.diagnostics.DiagnosticSink; import p.studio.compiler.source.diagnostics.DiagnosticSink;
import p.studio.utilities.structures.ReadOnlyList; import p.studio.utilities.structures.ReadOnlyList;
@ -19,4 +20,12 @@ public final class PbsFlowSemanticsValidator {
final DiagnosticSink diagnostics) { final DiagnosticSink diagnostics) {
flowBodyAnalyzer.validate(ast, supplementalTopDecls, feSurfaceContext, diagnostics); flowBodyAnalyzer.validate(ast, supplementalTopDecls, feSurfaceContext, diagnostics);
} }
/**
 * Collects frontend-owned inline hint payloads for the given file by delegating to the
 * flow body analyzer's hint-collecting pass; see PbsFlowBodyAnalyzer#collectInlineHints.
 */
public ReadOnlyList<PbsInlineHintSurface> collectInlineHints(
final PbsAst.File ast,
final ReadOnlyList<PbsAst.TopDecl> supplementalTopDecls,
final FESurfaceContext feSurfaceContext,
final DiagnosticSink diagnostics) {
return flowBodyAnalyzer.collectInlineHints(ast, supplementalTopDecls, feSurfaceContext, diagnostics);
}
} }

View File

@ -11,17 +11,30 @@ final class PbsFlowStatementAnalyzer {
void analyze(PbsAst.AssignStatement assignStatement, PbsFlowBodyContext context); void analyze(PbsAst.AssignStatement assignStatement, PbsFlowBodyContext context);
} }
@FunctionalInterface
interface InlineHintCollector {
    /** Invoked once per eligible inferred {@code let} binding with its resolved type. */
    void collect(PbsAst.LetStatement letStatement, TypeView inferredType);

    /** A collector that discards every report; used when no hint output is requested. */
    static InlineHintCollector noop() {
        return (ignoredStatement, ignoredType) -> {
        };
    }
}
private final PbsFlowTypeOps typeOps; private final PbsFlowTypeOps typeOps;
private final PbsFlowExpressionAnalyzer expressionAnalyzer; private final PbsFlowExpressionAnalyzer expressionAnalyzer;
private final AssignmentStatementAnalyzer assignmentStatementAnalyzer; private final AssignmentStatementAnalyzer assignmentStatementAnalyzer;
private final InlineHintCollector inlineHintCollector;
PbsFlowStatementAnalyzer( PbsFlowStatementAnalyzer(
final PbsFlowTypeOps typeOps, final PbsFlowTypeOps typeOps,
final PbsFlowExpressionAnalyzer expressionAnalyzer, final PbsFlowExpressionAnalyzer expressionAnalyzer,
final AssignmentStatementAnalyzer assignmentStatementAnalyzer) { final AssignmentStatementAnalyzer assignmentStatementAnalyzer,
final InlineHintCollector inlineHintCollector) {
this.typeOps = typeOps; this.typeOps = typeOps;
this.expressionAnalyzer = expressionAnalyzer; this.expressionAnalyzer = expressionAnalyzer;
this.assignmentStatementAnalyzer = assignmentStatementAnalyzer; this.assignmentStatementAnalyzer = assignmentStatementAnalyzer;
this.inlineHintCollector = inlineHintCollector;
} }
TypeView analyzeBlock( TypeView analyzeBlock(
@ -96,7 +109,11 @@ final class PbsFlowStatementAnalyzer {
ExprUse.VALUE, ExprUse.VALUE,
true, true,
this::analyzeNestedBlock); this::analyzeNestedBlock);
scope.bind(letStatement.name(), expected == null ? initializer.type() : expected, !letStatement.isConst()); final var resolvedType = expected == null ? initializer.type() : expected;
scope.bind(letStatement.name(), resolvedType, !letStatement.isConst());
if (letStatement.explicitType() == null) {
inlineHintCollector.collect(letStatement, resolvedType);
}
return; return;
} }
if (statement instanceof PbsAst.ReturnStatement returnStatement) { if (statement instanceof PbsAst.ReturnStatement returnStatement) {

View File

@ -0,0 +1,62 @@
package p.studio.compiler.pbs.semantics;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.TupleField;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticSupport.TypeView;
/**
 * Renders a flow-analysis {@code TypeView} as inline-hint display text.
 * A {@code null} result anywhere means "this type has no stable rendering,
 * so no hint should be produced".
 */
final class PbsInlineHintSurfaceFormatter {
    String format(final TypeView type) {
        if (type == null) {
            return null;
        }
        return switch (type.kind()) {
            // Kinds with no meaningful user-facing type text suppress the hint entirely.
            case UNKNOWN, ASSET_NAMESPACE -> null;
            case UNIT -> "void";
            // Every named type renders as its own name.
            case INT,
                    FLOAT,
                    BOOL,
                    STR,
                    STRUCT,
                    SERVICE,
                    CONTRACT,
                    CALLBACK,
                    ENUM,
                    ERROR,
                    TYPE_REF,
                    ADDRESSABLE -> type.name();
            case OPTIONAL -> {
                // An unrenderable element type suppresses the whole optional.
                final var elementText = format(type.inner());
                yield elementText == null ? null : "optional " + elementText;
            }
            case RESULT -> {
                // The error type is mandatory; the payload type may be absent.
                final var errorText = format(type.errorType());
                if (errorText == null) {
                    yield null;
                }
                final var payloadText = format(type.inner());
                yield payloadText == null
                        ? "result<" + errorText + ">"
                        : "result<" + errorText + "> " + payloadText;
            }
            case TUPLE -> formatTuple(type.tupleFields());
        };
    }

    /** Renders "(label: Type, ...)"; null if any field's type is unrenderable. */
    private String formatTuple(final java.util.List<TupleField> fields) {
        if (fields.isEmpty()) {
            return "()";
        }
        final var joiner = new java.util.StringJoiner(", ", "(", ")");
        for (final var field : fields) {
            final var fieldText = format(field.type());
            if (fieldText == null) {
                return null;
            }
            joiner.add(field.label() + ": " + fieldText);
        }
        return joiner.toString();
    }
}

View File

@ -13,6 +13,7 @@ import p.studio.compiler.models.IRHiddenGlobalKind;
import p.studio.compiler.models.IRSyntheticCallableKind; import p.studio.compiler.models.IRSyntheticCallableKind;
import p.studio.compiler.pbs.PbsFrontendCompiler; import p.studio.compiler.pbs.PbsFrontendCompiler;
import p.studio.compiler.pbs.PbsReservedMetadataExtractor; import p.studio.compiler.pbs.PbsReservedMetadataExtractor;
import p.studio.compiler.pbs.semantics.PbsFlowSemanticsValidator;
import p.studio.compiler.pbs.stdlib.InterfaceModuleLoader; import p.studio.compiler.pbs.stdlib.InterfaceModuleLoader;
import p.studio.compiler.pbs.stdlib.ResourceStdlibEnvironmentResolver; import p.studio.compiler.pbs.stdlib.ResourceStdlibEnvironmentResolver;
import p.studio.compiler.pbs.stdlib.StdlibEnvironmentResolver; import p.studio.compiler.pbs.stdlib.StdlibEnvironmentResolver;
@ -112,16 +113,52 @@ public class PBSFrontendPhaseService implements FrontendPhaseService {
final FrontendPhaseContext ctx, final FrontendPhaseContext ctx,
final DiagnosticSink diagnostics, final DiagnosticSink diagnostics,
final BuildingIssueSink issues) { final BuildingIssueSink issues) {
return semanticReadSurface(ctx, diagnostics, issues).supplementalTopDeclsByFile();
}
/**
 * Convenience accessor: builds the full semantic-read surface and returns only the
 * per-file inline hint payloads. Files that produced no hints are absent from the map.
 */
public static Map<FileId, ReadOnlyList<PbsInlineHintSurface>> inlineHintsByFile(
final FrontendPhaseContext ctx,
final DiagnosticSink diagnostics,
final BuildingIssueSink issues) {
return semanticReadSurface(ctx, diagnostics, issues).inlineHintsByFile();
}
public static PbsSemanticReadSurface semanticReadSurface(
final FrontendPhaseContext ctx,
final DiagnosticSink diagnostics,
final BuildingIssueSink issues) {
final var service = new PBSFrontendPhaseService(); final var service = new PBSFrontendPhaseService();
final var assembly = service.moduleAssemblyService.assemble(ctx, ctx.nameTable(), diagnostics, issues); final var assembly = service.moduleAssemblyService.assemble(ctx, ctx.nameTable(), diagnostics, issues);
final var importedSemanticContexts = service.importedSemanticContextService.build( final var importedSemanticContexts = service.importedSemanticContextService.build(
assembly.parsedSourceFiles(), assembly.parsedSourceFiles(),
assembly.moduleTable()); assembly.moduleTable());
final var flowSemanticsValidator = new PbsFlowSemanticsValidator();
final Map<FileId, ReadOnlyList<p.studio.compiler.pbs.ast.PbsAst.TopDecl>> supplementalTopDeclsByFile = new LinkedHashMap<>(); final Map<FileId, ReadOnlyList<p.studio.compiler.pbs.ast.PbsAst.TopDecl>> supplementalTopDeclsByFile = new LinkedHashMap<>();
final Map<FileId, ReadOnlyList<PbsInlineHintSurface>> inlineHintsByFile = new LinkedHashMap<>();
for (final var entry : importedSemanticContexts.entrySet()) { for (final var entry : importedSemanticContexts.entrySet()) {
supplementalTopDeclsByFile.put(entry.getKey(), entry.getValue().supplementalTopDecls()); supplementalTopDeclsByFile.put(entry.getKey(), entry.getValue().supplementalTopDecls());
} }
return Map.copyOf(supplementalTopDeclsByFile); for (final var parsedSourceFile : assembly.parsedSourceFiles()) {
final var importedSemanticContext = importedSemanticContexts.getOrDefault(
parsedSourceFile.fileId(),
PbsImportedSemanticContext.empty());
final var inlineHints = flowSemanticsValidator.collectInlineHints(
parsedSourceFile.ast(),
importedSemanticContext.supplementalTopDecls(),
ctx.feSurfaceContext(),
diagnostics);
if (!inlineHints.isEmpty()) {
inlineHintsByFile.put(parsedSourceFile.fileId(), inlineHints);
}
}
return new PbsSemanticReadSurface(
Map.copyOf(supplementalTopDeclsByFile),
Map.copyOf(inlineHintsByFile));
}
/**
 * Aggregated PBS semantic-read products keyed by file: supplemental top-level
 * declarations plus the frontend-owned inline hint payloads produced per file.
 */
public record PbsSemanticReadSurface(
Map<FileId, ReadOnlyList<p.studio.compiler.pbs.ast.PbsAst.TopDecl>> supplementalTopDeclsByFile,
Map<FileId, ReadOnlyList<PbsInlineHintSurface>> inlineHintsByFile) {
} }
private IRBackend mergeCompiledSources( private IRBackend mergeCompiledSources(

View File

@ -0,0 +1,23 @@
package p.studio.compiler.services;
import p.studio.compiler.source.Span;
import java.util.Objects;
/**
 * One frontend-owned inline hint: display {@code label} of a given {@code category},
 * anchored at {@code anchorSpan} in the source. All components are non-null; label and
 * category are trimmed and must not be blank.
 */
public record PbsInlineHintSurface(
        Span anchorSpan,
        String label,
        String category) {
    public PbsInlineHintSurface {
        anchorSpan = Objects.requireNonNull(anchorSpan, "anchorSpan");
        label = requireText(label, "label");
        category = requireText(category, "category");
    }

    /** Null-checks, trims, and rejects blank text for the named component. */
    private static String requireText(final String value, final String name) {
        final var trimmed = Objects.requireNonNull(value, name).trim();
        if (trimmed.isBlank()) {
            throw new IllegalArgumentException(name + " cannot be blank");
        }
        return trimmed;
    }
}

View File

@ -194,6 +194,29 @@ class PbsParserTest {
assertFalse(diagnostics.isEmpty(), "Parser should reject reserved keyword 'error' as callable/member name"); assertFalse(diagnostics.isEmpty(), "Parser should reject reserved keyword 'error' as callable/member name");
} }
// Verifies the parser records a dedicated span for the bound name of a `let`
// statement, strictly inside the whole-statement span — the anchor inline hints use.
@Test
void shouldTrackLetNameSpanSeparatelyFromWholeStatementSpan() {
final var source = """
fn main() -> void {
let inferred = 1;
return;
}
""";
final var diagnostics = DiagnosticSink.empty();
final var fileId = new FileId(0);
final PbsAst.File ast = PbsParser.parse(PbsLexer.lex(source, fileId, diagnostics), fileId, diagnostics);
assertTrue(diagnostics.isEmpty(), diagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList().toString());
final var functionDecl = assertInstanceOf(PbsAst.FunctionDecl.class, ast.topDecls().getFirst());
final var letStatement = assertInstanceOf(PbsAst.LetStatement.class, functionDecl.body().statements().getFirst());
assertEquals("inferred", letStatement.name());
// Name span covers exactly the identifier text as located in the raw source.
assertEquals(source.indexOf("inferred"), letStatement.nameSpan().getStart());
assertEquals(source.indexOf("inferred") + "inferred".length(), letStatement.nameSpan().getEnd());
// ...and sits strictly inside the whole `let ...;` statement span.
assertTrue(letStatement.span().getStart() < letStatement.nameSpan().getStart());
assertTrue(letStatement.span().getEnd() > letStatement.nameSpan().getEnd());
}
@Test @Test
void shouldAcceptStructFieldTrailingComma() { void shouldAcceptStructFieldTrailingComma() {
final var source = "declare struct S(a: int,);"; final var source = "declare struct S(a: int,);";

View File

@ -20,6 +20,7 @@ import p.studio.compiler.pbs.stdlib.StdlibEnvironment;
import p.studio.compiler.pbs.stdlib.StdlibEnvironmentResolver; import p.studio.compiler.pbs.stdlib.StdlibEnvironmentResolver;
import p.studio.compiler.pbs.stdlib.StdlibModuleSource; import p.studio.compiler.pbs.stdlib.StdlibModuleSource;
import p.studio.compiler.source.diagnostics.DiagnosticSink; import p.studio.compiler.source.diagnostics.DiagnosticSink;
import p.studio.compiler.source.identifiers.FileId;
import p.studio.compiler.source.identifiers.ProjectId; import p.studio.compiler.source.identifiers.ProjectId;
import p.studio.compiler.source.tables.FileTable; import p.studio.compiler.source.tables.FileTable;
import p.studio.compiler.source.tables.ProjectTable; import p.studio.compiler.source.tables.ProjectTable;
@ -1868,13 +1869,127 @@ class PBSFrontendPhaseServiceTest {
allowedDiagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList().toString()); allowedDiagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList().toString());
} }
private void registerFile( @Test
// End-to-end: inferred `let` bindings across scalar, struct, optional, tuple, and
// result forms each yield one inline hint with the expected label, in source order.
void shouldProduceInlineHintsForEligibleInferredLetBindings() throws IOException {
// Arrange a minimal on-disk project with one PBS source and its module barrel.
final var projectRoot = tempDir.resolve("project-inline-hints");
final var sourceRoot = projectRoot.resolve("src");
Files.createDirectories(sourceRoot);
final var sourceFile = sourceRoot.resolve("main.pbs");
final var modBarrel = sourceRoot.resolve("mod.barrel");
Files.writeString(sourceFile, """
declare struct Player(score: int);
declare error Failure {
Boom;
}
fn fetch_pair() -> result<Failure> (left: int, right: int) {
return ok((left: 1, right: 2));
}
fn main() -> void {
let scalar = 1;
let player = new Player(1);
let maybe = some(1);
let pair = (left: 1, right: 2);
let outcome = fetch_pair();
return;
}
""");
Files.writeString(modBarrel, "pub fn main() -> void;");
// Register the project and both files so the frontend phase can resolve them.
final var projectTable = new ProjectTable();
final var fileTable = new FileTable(1);
final var projectId = projectTable.register(ProjectDescriptor.builder()
.rootPath(projectRoot)
.name("app")
.version("1.0.0")
.sourceRoots(ReadOnlyList.wrap(List.of(sourceRoot)))
.build());
final var sourceFileId = registerFile(projectId, projectRoot, sourceFile, fileTable);
registerFile(projectId, projectRoot, modBarrel, fileTable);
final var ctx = new FrontendPhaseContext(
projectTable,
fileTable,
new BuildStack(ReadOnlyList.wrap(List.of(projectId))));
final var diagnostics = DiagnosticSink.empty();
// Act: collect hints for the whole build context.
final var inlineHintsByFile = PBSFrontendPhaseService.inlineHintsByFile(
ctx,
diagnostics,
BuildingIssueSink.empty());
final var sourceHints = inlineHintsByFile.get(sourceFileId);
// On failure, surface diagnostics and the keys actually produced for debugging.
assertNotNull(sourceHints, () -> "diagnostics="
+ diagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList()
+ ", keys=" + inlineHintsByFile.keySet());
// Assert: one label per inferred binding, in source order.
final var labels = sourceHints.asList().stream()
.map(PbsInlineHintSurface::label)
.toList();
assertEquals(List.of(
"int",
"Player",
"optional int",
"(left: int, right: int)",
"result<Failure> (left: int, right: int)"),
labels);
assertEquals(0, diagnostics.errorCount(), diagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList().toString());
}
@Test
void shouldSuppressInlineHintsForExplicitlyTypedLetBindings() throws IOException {
    // An explicitly typed `let` must not get a hint; only the inferred one does.
    final var projectRoot = tempDir.resolve("project-inline-hints-explicit");
    final var sourceRoot = projectRoot.resolve("src");
    Files.createDirectories(sourceRoot);
    final var sourceFile = sourceRoot.resolve("main.pbs");
    final var modBarrel = sourceRoot.resolve("mod.barrel");
    Files.writeString(sourceFile, """
    fn main() -> void {
    let inferred = 1;
    let explicit: int = 2;
    return;
    }
    """);
    Files.writeString(modBarrel, "pub fn main() -> void;");
    final var projectTable = new ProjectTable();
    final var fileTable = new FileTable(1);
    final var projectId = projectTable.register(ProjectDescriptor.builder()
            .rootPath(projectRoot)
            .name("app")
            .version("1.0.0")
            .sourceRoots(ReadOnlyList.wrap(List.of(sourceRoot)))
            .build());
    final var sourceFileId = registerFile(projectId, projectRoot, sourceFile, fileTable);
    registerFile(projectId, projectRoot, modBarrel, fileTable);
    final var ctx = new FrontendPhaseContext(
            projectTable,
            fileTable,
            new BuildStack(ReadOnlyList.wrap(List.of(projectId))));
    final var diagnostics = DiagnosticSink.empty();
    final var inlineHints = PBSFrontendPhaseService.inlineHintsByFile(
            ctx,
            diagnostics,
            BuildingIssueSink.empty()).get(sourceFileId);
    // Guard before dereferencing: fail with the collected diagnostics instead
    // of an opaque NullPointerException when no hints were produced at all.
    assertNotNull(inlineHints, () -> "diagnostics="
            + diagnostics.stream().map(d -> d.getCode() + ":" + d.getMessage()).toList());
    // Exactly one hint: the inferred binding, surfaced as a "type" hint.
    assertEquals(1, inlineHints.size());
    assertEquals("int", inlineHints.getFirst().label());
    assertEquals("type", inlineHints.getFirst().category());
}
private FileId registerFile(
        final ProjectId projectId,
        final Path projectRoot,
        final Path file,
        final FileTable fileTable) throws IOException {
    final BasicFileAttributes attributes = Files.readAttributes(file, BasicFileAttributes.class);
    return fileTable.register(new SourceHandle(
            projectId,
            projectRoot.relativize(file),
            file,

View File

@ -14,11 +14,13 @@ import p.studio.compiler.source.diagnostics.Diagnostic;
import p.studio.compiler.source.diagnostics.DiagnosticSink; import p.studio.compiler.source.diagnostics.DiagnosticSink;
import p.studio.compiler.source.identifiers.FileId; import p.studio.compiler.source.identifiers.FileId;
import p.studio.compiler.services.PBSFrontendPhaseService; import p.studio.compiler.services.PBSFrontendPhaseService;
import p.studio.compiler.services.PbsInlineHintSurface;
import p.studio.compiler.workspaces.AssetSurfaceContextLoader; import p.studio.compiler.workspaces.AssetSurfaceContextLoader;
import p.studio.compiler.workspaces.PipelineStage; import p.studio.compiler.workspaces.PipelineStage;
import p.studio.compiler.workspaces.stages.LoadSourcesPipelineStage; import p.studio.compiler.workspaces.stages.LoadSourcesPipelineStage;
import p.studio.compiler.workspaces.stages.ResolveDepsPipelineStage; import p.studio.compiler.workspaces.stages.ResolveDepsPipelineStage;
import p.studio.lsp.dtos.LspDiagnosticDTO; import p.studio.lsp.dtos.LspDiagnosticDTO;
import p.studio.lsp.dtos.LspInlineHintDTO;
import p.studio.lsp.dtos.LspRangeDTO; import p.studio.lsp.dtos.LspRangeDTO;
import p.studio.lsp.dtos.LspSemanticPresentationDTO; import p.studio.lsp.dtos.LspSemanticPresentationDTO;
import p.studio.lsp.messages.*; import p.studio.lsp.messages.*;
@ -154,6 +156,10 @@ final class LspSemanticReadPhase {
final Map<FileId, List<PbsAst.TopDecl>> importedSupplementalTopDeclsByFile = importedSupplementalTopDeclsByFile( final Map<FileId, List<PbsAst.TopDecl>> importedSupplementalTopDeclsByFile = importedSupplementalTopDeclsByFile(
snapshot.frontendSpec(), snapshot.frontendSpec(),
context); context);
final Map<Path, List<LspInlineHintDTO>> inlineHintsByDocument = inlineHintsByDocument(
snapshot,
snapshot.frontendSpec(),
context);
final List<IndexedDocument> indexedDocuments = new ArrayList<>(); final List<IndexedDocument> indexedDocuments = new ArrayList<>();
for (final FileId fileId : snapshot.fileTable()) { for (final FileId fileId : snapshot.fileTable()) {
final SourceHandle sourceHandle = snapshot.fileTable().get(fileId); final SourceHandle sourceHandle = snapshot.fileTable().get(fileId);
@ -181,7 +187,7 @@ final class LspSemanticReadPhase {
semanticPresentation(snapshot.frontendSpec()), semanticPresentation(snapshot.frontendSpec()),
diagnosticsByDocument, diagnosticsByDocument,
semanticIndex.semanticHighlightsByDocument(), semanticIndex.semanticHighlightsByDocument(),
Map.of(), inlineHintsByDocument,
semanticIndex.documentSymbolsByDocument(), semanticIndex.documentSymbolsByDocument(),
semanticIndex.structuralAnchorsByDocument(), semanticIndex.structuralAnchorsByDocument(),
semanticIndex.workspaceSymbols(), semanticIndex.workspaceSymbols(),
@ -192,9 +198,42 @@ final class LspSemanticReadPhase {
private static Map<FileId, List<PbsAst.TopDecl>> importedSupplementalTopDeclsByFile( private static Map<FileId, List<PbsAst.TopDecl>> importedSupplementalTopDeclsByFile(
final FrontendSpec frontendSpec, final FrontendSpec frontendSpec,
final BuilderPipelineContext context) { final BuilderPipelineContext context) {
if (context.resolvedWorkspace == null || context.fileTable == null || !"pbs".equals(frontendSpec.getLanguageId())) { final var surface = semanticReadSurface(frontendSpec, context);
final Map<FileId, List<PbsAst.TopDecl>> byFile = new LinkedHashMap<>();
for (final var entry : surface.supplementalTopDeclsByFile().entrySet()) {
byFile.put(entry.getKey(), entry.getValue().asList());
}
return Map.copyOf(byFile);
}
private static Map<Path, List<LspInlineHintDTO>> inlineHintsByDocument(
final AnalysisSnapshot snapshot,
final FrontendSpec frontendSpec,
final BuilderPipelineContext context) {
if (snapshot.fileTable() == null) {
return Map.of(); return Map.of();
} }
final var surface = semanticReadSurface(frontendSpec, context);
final Map<Path, List<LspInlineHintDTO>> byDocument = new LinkedHashMap<>();
for (final var entry : surface.inlineHintsByFile().entrySet()) {
final SourceHandle sourceHandle = snapshot.fileTable().get(entry.getKey());
final Path documentPath = sourceHandle.getCanonPath().toAbsolutePath().normalize();
final var hints = entry.getValue().asList().stream()
.map(LspSemanticReadPhase::toInlineHintDTO)
.toList();
if (!hints.isEmpty()) {
byDocument.put(documentPath, hints);
}
}
return Map.copyOf(byDocument);
}
private static PBSFrontendPhaseService.PbsSemanticReadSurface semanticReadSurface(
final FrontendSpec frontendSpec,
final BuilderPipelineContext context) {
if (context.resolvedWorkspace == null || context.fileTable == null || !"pbs".equals(frontendSpec.getLanguageId())) {
return new PBSFrontendPhaseService.PbsSemanticReadSurface(Map.of(), Map.of());
}
final FESurfaceContext feSurfaceContext = new AssetSurfaceContextLoader().load(context.resolvedWorkspace.mainProject().getRootPath()); final FESurfaceContext feSurfaceContext = new AssetSurfaceContextLoader().load(context.resolvedWorkspace.mainProject().getRootPath());
final FrontendPhaseContext frontendPhaseContext = new FrontendPhaseContext( final FrontendPhaseContext frontendPhaseContext = new FrontendPhaseContext(
context.resolvedWorkspace.graph().projectTable(), context.resolvedWorkspace.graph().projectTable(),
@ -205,15 +244,19 @@ final class LspSemanticReadPhase {
feSurfaceContext); feSurfaceContext);
final var diagnostics = DiagnosticSink.empty(); final var diagnostics = DiagnosticSink.empty();
final var issues = BuildingIssueSink.empty(); final var issues = BuildingIssueSink.empty();
final var supplementalTopDecls = PBSFrontendPhaseService.importedSupplementalTopDeclsByFile( return PBSFrontendPhaseService.semanticReadSurface(
frontendPhaseContext, frontendPhaseContext,
diagnostics, diagnostics,
issues); issues);
final Map<FileId, List<PbsAst.TopDecl>> byFile = new LinkedHashMap<>();
for (final var entry : supplementalTopDecls.entrySet()) {
byFile.put(entry.getKey(), entry.getValue().asList());
} }
return Map.copyOf(byFile);
private static LspInlineHintDTO toInlineHintDTO(final PbsInlineHintSurface hint) {
return new LspInlineHintDTO(
new LspRangeDTO(
(int) hint.anchorSpan().getStart(),
(int) hint.anchorSpan().getEnd()),
hint.label(),
hint.category());
} }
private record IndexedDocument( private record IndexedDocument(

View File

@ -89,7 +89,7 @@ final class LspServiceImplTest {
assertEquals("pbs-function", semanticKeyForLexeme(analysis, OVERLAY_SOURCE, "helper")); assertEquals("pbs-function", semanticKeyForLexeme(analysis, OVERLAY_SOURCE, "helper"));
assertEquals(List.of("/themes/pbs/semantic-highlighting.css"), analysis.semanticPresentation().resources()); assertEquals(List.of("/themes/pbs/semantic-highlighting.css"), analysis.semanticPresentation().resources());
assertTrue(analysis.semanticPresentation().semanticKeys().contains("pbs-function")); assertTrue(analysis.semanticPresentation().semanticKeys().contains("pbs-function"));
assertTrue(analysis.inlineHints().isEmpty()); assertFalse(analysis.inlineHints().isEmpty(), analysis.toString());
assertTrue( assertTrue(
analysis.documentSymbols().stream().anyMatch(symbol -> symbol.name().equals("helper_call")), analysis.documentSymbols().stream().anyMatch(symbol -> symbol.name().equals("helper_call")),
@ -209,7 +209,8 @@ final class LspServiceImplTest {
void analyzeDocumentReturnsDedicatedInlineHintTransportSurface() throws Exception { void analyzeDocumentReturnsDedicatedInlineHintTransportSurface() throws Exception {
final Path projectRoot = createProject(); final Path projectRoot = createProject();
final Path mainFile = projectRoot.resolve("src/main.pbs"); final Path mainFile = projectRoot.resolve("src/main.pbs");
Files.writeString(mainFile, "fn main() -> void { let value = 1; }\n"); final String source = "fn main() -> void { let value = 1; }\n";
Files.writeString(mainFile, source);
final VfsProjectDocument vfs = new FilesystemProjectDocumentVfsFactory().open(projectContext(projectRoot)); final VfsProjectDocument vfs = new FilesystemProjectDocumentVfsFactory().open(projectContext(projectRoot));
final LspService service = new LspServiceImpl( final LspService service = new LspServiceImpl(
@ -218,7 +219,39 @@ final class LspServiceImplTest {
final var analysis = service.analyzeDocument(new LspAnalyzeDocumentRequest(mainFile)); final var analysis = service.analyzeDocument(new LspAnalyzeDocumentRequest(mainFile));
assertTrue(analysis.inlineHints().isEmpty(), analysis.toString()); assertEquals(1, analysis.inlineHints().size(), analysis.toString());
assertEquals("int", analysis.inlineHints().getFirst().label());
assertEquals("type", analysis.inlineHints().getFirst().category());
assertEquals("value", sourceSlice(source, analysis.inlineHints().getFirst().anchor()));
}
@Test
void analyzeDocumentPreservesValidInlineHintsUnderPartialDegradation() throws Exception {
final Path projectRoot = createProject();
final Path mainFile = projectRoot.resolve("src/main.pbs");
final String source = """
fn ok() -> void {
let value = 1;
return;
}
fn broken( -> void {
let missing = 2;
}
""";
Files.writeString(mainFile, source);
final VfsProjectDocument vfs = new FilesystemProjectDocumentVfsFactory().open(projectContext(projectRoot));
final LspService service = new LspServiceImpl(
new LspProjectContext("Example", "pbs", projectRoot),
vfs);
final var analysis = service.analyzeDocument(new LspAnalyzeDocumentRequest(mainFile));
assertFalse(analysis.diagnostics().isEmpty(), analysis.toString());
assertEquals(1, analysis.inlineHints().size(), analysis.toString());
assertEquals("int", analysis.inlineHints().getFirst().label());
assertEquals("value", sourceSlice(source, analysis.inlineHints().getFirst().anchor()));
} }
private Path createProject() throws Exception { private Path createProject() throws Exception {
@ -315,6 +348,12 @@ final class LspServiceImplTest {
return source.substring(start, end); return source.substring(start, end);
} }
private static String sourceSlice(
final String source,
final p.studio.lsp.dtos.LspRangeDTO range) {
return spanContent(source, range.startOffset(), range.endOffset());
}
private static final class OverlayVfsProjectDocument implements VfsProjectDocument { private static final class OverlayVfsProjectDocument implements VfsProjectDocument {
private final VfsProjectDocument delegate; private final VfsProjectDocument delegate;
private final Path overlayPath; private final Path overlayPath;