Implement packer PR-08 build artifact emission
This commit is contained in:
parent
14cff847ca
commit
682a0e72b5
@ -0,0 +1,158 @@
|
||||
package p.packer.building;
|
||||
|
||||
import p.packer.api.PackerOperationClass;
|
||||
import p.packer.api.PackerOperationStatus;
|
||||
import p.packer.api.building.PackerBuildRequest;
|
||||
import p.packer.api.building.PackerBuildResult;
|
||||
import p.packer.api.building.PackerBuildService;
|
||||
import p.packer.api.diagnostics.PackerDiagnostic;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class FileSystemPackerBuildService implements PackerBuildService {
|
||||
private static final byte[] MAGIC = new byte[]{'P', 'P', 'A', '1'};
|
||||
private static final int SCHEMA_VERSION = 1;
|
||||
private static final int PRELUDE_SIZE = 24;
|
||||
|
||||
private final PackerBuildPlanner buildPlanner;
|
||||
|
||||
public FileSystemPackerBuildService() {
|
||||
this(new PackerBuildPlanner());
|
||||
}
|
||||
|
||||
public FileSystemPackerBuildService(PackerBuildPlanner buildPlanner) {
|
||||
this.buildPlanner = Objects.requireNonNull(buildPlanner, "buildPlanner");
|
||||
}
|
||||
|
||||
@Override
|
||||
public PackerOperationClass operationClass() {
|
||||
return PackerOperationClass.WORKSPACE_MUTATION;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PackerBuildResult build(PackerBuildRequest request) {
|
||||
final Path buildDirectory = Objects.requireNonNull(request, "request").project().rootPath().resolve("build");
|
||||
final Path assetsArchive = buildDirectory.resolve("assets.pa").toAbsolutePath().normalize();
|
||||
final Path assetTableJson = buildDirectory.resolve("asset_table.json").toAbsolutePath().normalize();
|
||||
final Path preloadJson = buildDirectory.resolve("preload.json").toAbsolutePath().normalize();
|
||||
final Path metadataJson = buildDirectory.resolve("asset_table_metadata.json").toAbsolutePath().normalize();
|
||||
|
||||
final PackerBuildPlanResult planResult = buildPlanner.plan(request.project());
|
||||
if (planResult.plan() == null) {
|
||||
return new PackerBuildResult(
|
||||
PackerOperationStatus.FAILED,
|
||||
planResult.summary(),
|
||||
assetsArchive,
|
||||
Map.of(),
|
||||
planResult.diagnostics());
|
||||
}
|
||||
|
||||
try {
|
||||
Files.createDirectories(buildDirectory);
|
||||
final EmittedArchive archive = emitArchive(planResult.plan());
|
||||
Files.write(assetsArchive, archive.bytes());
|
||||
Files.writeString(assetTableJson, archive.assetTableJson(), StandardCharsets.UTF_8);
|
||||
Files.writeString(preloadJson, archive.preloadJson(), StandardCharsets.UTF_8);
|
||||
Files.writeString(metadataJson, archive.metadataJson(), StandardCharsets.UTF_8);
|
||||
return new PackerBuildResult(
|
||||
planResult.status(),
|
||||
"Build emitted " + planResult.plan().assets().size() + " assets.",
|
||||
assetsArchive,
|
||||
Map.of(
|
||||
"build/asset_table.json", assetTableJson,
|
||||
"build/preload.json", preloadJson,
|
||||
"build/asset_table_metadata.json", metadataJson),
|
||||
planResult.diagnostics());
|
||||
} catch (IOException exception) {
|
||||
throw new UncheckedIOException(exception);
|
||||
}
|
||||
}
|
||||
|
||||
private EmittedArchive emitArchive(PackerBuildPlan plan) throws IOException {
|
||||
final List<Map<String, Object>> assetTable = new ArrayList<>();
|
||||
final ByteArrayOutputStream payloadStream = new ByteArrayOutputStream();
|
||||
int payloadOffset = 0;
|
||||
for (PackerPlannedAsset asset : plan.assets()) {
|
||||
final ByteArrayOutputStream assetPayload = new ByteArrayOutputStream();
|
||||
for (PackerPlannedInput input : asset.inputs()) {
|
||||
assetPayload.write(Files.readAllBytes(input.absolutePath()));
|
||||
}
|
||||
final byte[] assetBytes = assetPayload.toByteArray();
|
||||
payloadStream.write(assetBytes);
|
||||
|
||||
final Map<String, Object> row = new LinkedHashMap<>();
|
||||
row.put("asset_family", asset.assetFamily());
|
||||
row.put("asset_id", asset.assetId());
|
||||
row.put("asset_name", asset.assetName());
|
||||
row.put("asset_uuid", asset.assetUuid());
|
||||
row.put("output_codec", asset.outputCodec());
|
||||
row.put("output_format", asset.outputFormat());
|
||||
row.put("payload_length", assetBytes.length);
|
||||
row.put("payload_offset", payloadOffset);
|
||||
row.put("relative_root", asset.relativeRoot());
|
||||
assetTable.add(row);
|
||||
payloadOffset += assetBytes.length;
|
||||
}
|
||||
|
||||
final List<Map<String, Object>> preload = plan.assets().stream()
|
||||
.filter(PackerPlannedAsset::preload)
|
||||
.map(asset -> {
|
||||
final Map<String, Object> row = new LinkedHashMap<>();
|
||||
row.put("asset_id", asset.assetId());
|
||||
row.put("asset_name", asset.assetName());
|
||||
row.put("asset_uuid", asset.assetUuid());
|
||||
return row;
|
||||
})
|
||||
.toList();
|
||||
|
||||
final String assetTableJson = PackerCanonicalJson.write(assetTable);
|
||||
final String preloadJson = PackerCanonicalJson.write(preload);
|
||||
final String headerJson = PackerCanonicalJson.write(Map.of(
|
||||
"asset_table", assetTable,
|
||||
"preload", preload));
|
||||
final byte[] headerBytes = headerJson.getBytes(StandardCharsets.UTF_8);
|
||||
final byte[] payloadBytes = payloadStream.toByteArray();
|
||||
final byte[] prelude = ByteBuffer.allocate(PRELUDE_SIZE)
|
||||
.order(ByteOrder.BIG_ENDIAN)
|
||||
.put(MAGIC)
|
||||
.putInt(SCHEMA_VERSION)
|
||||
.putInt(headerBytes.length)
|
||||
.putInt(PRELUDE_SIZE + headerBytes.length)
|
||||
.putInt(0)
|
||||
.putInt(0)
|
||||
.array();
|
||||
|
||||
final byte[] archiveBytes = new byte[prelude.length + headerBytes.length + payloadBytes.length];
|
||||
System.arraycopy(prelude, 0, archiveBytes, 0, prelude.length);
|
||||
System.arraycopy(headerBytes, 0, archiveBytes, prelude.length, headerBytes.length);
|
||||
System.arraycopy(payloadBytes, 0, archiveBytes, prelude.length + headerBytes.length, payloadBytes.length);
|
||||
|
||||
final String metadataJson = PackerCanonicalJson.write(Map.of(
|
||||
"asset_count", plan.assets().size(),
|
||||
"cache_key", plan.cacheKey(),
|
||||
"header_length", headerBytes.length,
|
||||
"payload_bytes", payloadBytes.length,
|
||||
"payload_offset", PRELUDE_SIZE + headerBytes.length,
|
||||
"schema_version", SCHEMA_VERSION));
|
||||
return new EmittedArchive(archiveBytes, assetTableJson, preloadJson, metadataJson);
|
||||
}
|
||||
|
||||
private record EmittedArchive(
|
||||
byte[] bytes,
|
||||
String assetTableJson,
|
||||
String preloadJson,
|
||||
String metadataJson) {
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,143 @@
|
||||
package p.packer.building;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import p.packer.api.PackerOperationStatus;
|
||||
import p.packer.api.PackerProjectContext;
|
||||
import p.packer.api.building.PackerBuildRequest;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
/**
 * Integration-style tests for {@code FileSystemPackerBuildService}: each test
 * scripts a small project layout under a JUnit-managed temp directory, runs a
 * build, and inspects the emitted archive and companion JSON artifacts.
 */
final class FileSystemPackerBuildServiceTest {
    // Shared Jackson mapper used to decode the emitted JSON artifacts.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @TempDir
    Path tempDir;

    // Two builds of an unchanged project must produce byte-identical archive
    // bytes and identical companion JSON text (determinism guarantee).
    @Test
    void buildEmitsArchiveAndCompanionArtifactsDeterministically() throws Exception {
        final Path projectRoot = createProject(tempDir.resolve("main"));
        final FileSystemPackerBuildService service = new FileSystemPackerBuildService();

        final var first = service.build(new PackerBuildRequest(new PackerProjectContext("main", projectRoot), false));
        final byte[] firstBytes = Files.readAllBytes(first.assetsArchive());
        final String firstAssetTable = Files.readString(first.companionArtifacts().get("build/asset_table.json"));
        final String firstPreload = Files.readString(first.companionArtifacts().get("build/preload.json"));

        // Rebuild from the same on-disk state and compare artifacts.
        final var second = service.build(new PackerBuildRequest(new PackerProjectContext("main", projectRoot), false));
        final byte[] secondBytes = Files.readAllBytes(second.assetsArchive());

        assertEquals(PackerOperationStatus.SUCCESS, first.status());
        assertArrayEquals(firstBytes, secondBytes);
        assertEquals(firstAssetTable, Files.readString(second.companionArtifacts().get("build/asset_table.json")));
        assertEquals(firstPreload, Files.readString(second.companionArtifacts().get("build/preload.json")));
    }

    // The standalone asset_table.json / preload.json artifacts must contain
    // exactly the same sections embedded in the archive's JSON header, and the
    // metadata artifact must agree with the prelude's payload offset.
    @Test
    void companionArtifactsMirrorHeaderSections() throws Exception {
        final Path projectRoot = createProject(tempDir.resolve("mirror"));
        final FileSystemPackerBuildService service = new FileSystemPackerBuildService();

        final var result = service.build(new PackerBuildRequest(new PackerProjectContext("mirror", projectRoot), false));
        final ParsedArchive archive = parseArchive(result.assetsArchive());
        final String assetTableJson = Files.readString(result.companionArtifacts().get("build/asset_table.json"));
        final String preloadJson = Files.readString(result.companionArtifacts().get("build/preload.json"));
        final Map<?, ?> metadata = MAPPER.readValue(Files.readString(result.companionArtifacts().get("build/asset_table_metadata.json")), Map.class);

        assertEquals(archive.header().get("asset_table"), MAPPER.readValue(assetTableJson, List.class));
        assertEquals(archive.header().get("preload"), MAPPER.readValue(preloadJson, List.class));
        // createProject() scripts exactly two assets (ui_atlas + ui_sounds).
        assertEquals(2, metadata.get("asset_count"));
        assertEquals(archive.payloadOffset(), metadata.get("payload_offset"));
    }

    // Deleting a declared input file must produce a FAILED result with a
    // missing-input diagnostic and no companion artifacts.
    @Test
    void buildFailsCleanlyWhenPlannerIsBlocked() throws Exception {
        final Path projectRoot = createProject(tempDir.resolve("broken"));
        Files.delete(projectRoot.resolve("assets/ui/atlas/sprites/confirm.png"));
        final FileSystemPackerBuildService service = new FileSystemPackerBuildService();

        final var result = service.build(new PackerBuildRequest(new PackerProjectContext("broken", projectRoot), false));

        assertEquals(PackerOperationStatus.FAILED, result.status());
        assertTrue(result.diagnostics().stream().anyMatch(diagnostic -> diagnostic.message().contains("Build input is missing")));
        assertTrue(result.companionArtifacts().isEmpty());
    }

    /**
     * Decodes the archive prelude (big-endian: 4-byte magic, schema version,
     * header length, payload offset, two reserved ints) and parses the JSON
     * header that immediately follows the 24-byte prelude.
     */
    private ParsedArchive parseArchive(Path archivePath) throws Exception {
        final byte[] bytes = Files.readAllBytes(archivePath);
        final ByteBuffer buffer = ByteBuffer.wrap(bytes).order(ByteOrder.BIG_ENDIAN);
        final byte[] magic = new byte[4];
        buffer.get(magic);
        assertEquals("PPA1", new String(magic, StandardCharsets.UTF_8));
        assertEquals(1, buffer.getInt()); // schema version
        final int headerLength = buffer.getInt();
        final int payloadOffset = buffer.getInt();
        buffer.getInt(); // reserved
        buffer.getInt(); // reserved
        // Header JSON starts right after the fixed 24-byte prelude.
        final String headerJson = new String(bytes, 24, headerLength, StandardCharsets.UTF_8);
        return new ParsedArchive(MAPPER.readValue(headerJson, Map.class), payloadOffset);
    }

    /**
     * Scripts a two-asset project on disk: an image-bank atlas (preload on),
     * a sound bank (preload off), their input files, and the packer index
     * under {@code assets/.prometeu}. Returns the project root.
     */
    private Path createProject(Path projectRoot) throws Exception {
        final Path atlasRoot = projectRoot.resolve("assets/ui/atlas");
        final Path soundsRoot = projectRoot.resolve("assets/audio/ui_sounds");
        Files.createDirectories(atlasRoot.resolve("sprites"));
        Files.createDirectories(soundsRoot.resolve("sources"));
        Files.createDirectories(projectRoot.resolve("assets/.prometeu"));
        Files.writeString(atlasRoot.resolve("asset.json"), """
                {
                  "schema_version": 1,
                  "name": "ui_atlas",
                  "type": "image_bank",
                  "inputs": { "sprites": ["sprites/confirm.png"] },
                  "output": { "format": "TILES/indexed_v1", "codec": "RAW" },
                  "preload": { "enabled": true }
                }
                """);
        Files.writeString(soundsRoot.resolve("asset.json"), """
                {
                  "schema_version": 1,
                  "name": "ui_sounds",
                  "type": "sound_bank",
                  "inputs": { "sources": ["sources/confirm.wav"] },
                  "output": { "format": "SOUND/bank_v1", "codec": "RAW" },
                  "preload": { "enabled": false }
                }
                """);
        Files.writeString(atlasRoot.resolve("sprites/confirm.png"), "png");
        Files.writeString(soundsRoot.resolve("sources/confirm.wav"), "wav");
        Files.writeString(projectRoot.resolve("assets/.prometeu/index.json"), """
                {
                  "schema_version": 1,
                  "next_asset_id": 3,
                  "assets": [
                    {
                      "asset_id": 1,
                      "asset_uuid": "uuid-1",
                      "root": "ui/atlas"
                    },
                    {
                      "asset_id": 2,
                      "asset_uuid": "uuid-2",
                      "root": "audio/ui_sounds"
                    }
                  ]
                }
                """);
        return projectRoot;
    }

    // Decoded archive header plus the payload offset read from the prelude.
    private record ParsedArchive(
            Map<?, ?> header,
            int payloadOffset) {
    }
}
|
||||
Loading…
x
Reference in New Issue
Block a user