implements PR-36
This commit is contained in:
parent
fa527720a4
commit
31621303d1
@ -10,6 +10,8 @@ import p.packer.events.PackerEventKind;
|
||||
import p.packer.events.PackerEventSink;
|
||||
import p.packer.events.PackerProgress;
|
||||
import p.packer.dtos.PackerDiagnosticDTO;
|
||||
import p.packer.dtos.PackerEmittedArtifactDTO;
|
||||
import p.packer.dtos.PackerPackExecutionSummaryDTO;
|
||||
import p.packer.dtos.PackerPackSummaryAssetDTO;
|
||||
import p.packer.dtos.PackerPackSummaryDTO;
|
||||
import p.packer.dtos.PackerPackValidationAssetDTO;
|
||||
@ -19,12 +21,18 @@ import p.packer.messages.diagnostics.PackerDiagnosticCategory;
|
||||
import p.packer.messages.diagnostics.PackerDiagnosticSeverity;
|
||||
import p.packer.models.*;
|
||||
import p.packer.repositories.FileSystemPackerCacheRepository;
|
||||
import p.packer.repositories.PackerAssetWalker;
|
||||
import p.packer.repositories.PackerContractFingerprint;
|
||||
import p.packer.repositories.PackerRuntimeAssetMaterializer;
|
||||
import p.packer.repositories.PackerRuntimeRegistry;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Stream;
||||
|
||||
@ -178,7 +186,9 @@ public final class FileSystemPackerWorkspaceService implements PackerWorkspaceSe
|
||||
|
||||
@Override
|
||||
public PackWorkspaceResult packWorkspace(PackWorkspaceRequest request) {
|
||||
throw new UnsupportedOperationException("pack workspace execution is not implemented yet");
|
||||
final PackWorkspaceRequest safeRequest = Objects.requireNonNull(request, "request");
|
||||
final PackerProjectContext project = safeRequest.project();
|
||||
return writeCoordinator.execute(project, () -> packWorkspaceInWriteLane(safeRequest));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -303,6 +313,71 @@ public final class FileSystemPackerWorkspaceService implements PackerWorkspaceSe
|
||||
}
|
||||
}
|
||||
|
||||
// Executes the pack while holding the project's write lane (acquired by packWorkspace).
// Flow: refresh + freeze the runtime snapshot, re-run the blocking-diagnostics gate,
// then write every artifact into build/.staging/<operationId> and only move the files
// into build/ once all of them exist, so a failure mid-pack leaves any previous build
// outputs untouched.
private PackWorkspaceResult packWorkspaceInWriteLane(PackWorkspaceRequest request) {
    final PackerProjectContext project = request.project();
    final long startedAt = System.currentTimeMillis();
    workspaceFoundation.initWorkspace(new InitWorkspaceRequest(project));

    // Freeze a rematerialized snapshot so the gate below and the emission both see the
    // exact same on-disk state, even if files change while packing runs.
    final PackerRuntimeSnapshot refreshedSnapshot = runtimeRegistry.refresh(project).snapshot();
    final PackerRuntimeSnapshot frozenSnapshot = createFrozenPackingSnapshot(refreshedSnapshot);
    // Gate: any build-included asset with a blocking diagnostic aborts emission.
    final List<PackerPackValidationAssetDTO> blockedAssets = frozenSnapshot.assets().stream()
        .filter(runtimeAsset -> runtimeAsset.registryEntry().isPresent() && runtimeAsset.registryEntry().get().includedInBuild())
        .map(this::toPackValidationAssetDTO)
        .filter(PackerPackValidationAssetDTO::blocked)
        .toList();
    if (!blockedAssets.isEmpty()) {
        // PARTIAL with an empty artifact list: nothing was written to build/.
        return new PackWorkspaceResult(
            PackerOperationStatus.PARTIAL,
            "Pack validation found blocking diagnostics in " + blockedAssets.size() + " included assets.",
            new PackerPackExecutionSummaryDTO("assets.pa", 0, System.currentTimeMillis() - startedAt, List.of()));
    }

    final Path buildRoot = project.rootPath().resolve("build").toAbsolutePath().normalize();
    final String operationId = randomOperationId();
    // Per-operation staging directory; a unique id avoids collisions with leftovers.
    final Path stagingRoot = buildRoot.resolve(".staging").resolve(operationId);
    final Path stagedAssetsPath = stagingRoot.resolve("assets.pa");
    final Path stagedAssetTablePath = stagingRoot.resolve("asset_table.json");
    final Path stagedPreloadPath = stagingRoot.resolve("preload.json");
    final Path stagedAssetTableMetadataPath = stagingRoot.resolve("asset_table_metadata.json");
    try {
        Files.createDirectories(stagingRoot);
        final PackedWorkspaceArtifacts packedWorkspace = buildPackedWorkspaceArtifacts(frozenSnapshot);
        writeJson(stagedAssetTablePath, packedWorkspace.assetTable());
        writeJson(stagedPreloadPath, packedWorkspace.preload());
        writeJson(stagedAssetTableMetadataPath, packedWorkspace.assetTableMetadata());
        Files.write(stagedAssetsPath, packedWorkspace.assetsPaBytes());

        // Publish: move each staged file over the live one. Each move replaces its
        // target, but the four moves are not a single atomic step.
        // NOTE(review): a crash between moves can leave mixed old/new outputs, and the
        // staging directory is not deleted on the failure path below — confirm whether
        // stale .staging/<id> dirs are purged elsewhere.
        final Path finalAssetsPath = buildRoot.resolve("assets.pa");
        final Path finalAssetTablePath = buildRoot.resolve("asset_table.json");
        final Path finalPreloadPath = buildRoot.resolve("preload.json");
        final Path finalAssetTableMetadataPath = buildRoot.resolve("asset_table_metadata.json");
        Files.createDirectories(buildRoot);
        Files.move(stagedAssetsPath, finalAssetsPath, StandardCopyOption.REPLACE_EXISTING);
        Files.move(stagedAssetTablePath, finalAssetTablePath, StandardCopyOption.REPLACE_EXISTING);
        Files.move(stagedPreloadPath, finalPreloadPath, StandardCopyOption.REPLACE_EXISTING);
        Files.move(stagedAssetTableMetadataPath, finalAssetTableMetadataPath, StandardCopyOption.REPLACE_EXISTING);

        final List<PackerEmittedArtifactDTO> emittedArtifacts = List.of(
            emittedArtifact("assets.pa", finalAssetsPath, true),
            emittedArtifact("asset_table.json", finalAssetTablePath, false),
            emittedArtifact("preload.json", finalPreloadPath, false),
            emittedArtifact("asset_table_metadata.json", finalAssetTableMetadataPath, false));
        return new PackWorkspaceResult(
            PackerOperationStatus.SUCCESS,
            "Pack workspace completed successfully.",
            new PackerPackExecutionSummaryDTO(
                "assets.pa",
                packedWorkspace.packedAssetCount(),
                System.currentTimeMillis() - startedAt,
                emittedArtifacts));
    } catch (Exception exception) {
        // Any failure is mapped to a FAILED result rather than propagated to callers.
        return new PackWorkspaceResult(
            PackerOperationStatus.FAILED,
            "Pack workspace failed: " + exception.getMessage(),
            new PackerPackExecutionSummaryDTO("assets.pa", 0, System.currentTimeMillis() - startedAt, List.of()));
    }
}
|
||||
|
||||
private RegisterAssetResult registerAssetInWriteLane(
|
||||
RegisterAssetRequest request,
|
||||
PackerOperationEventEmitter events) {
|
||||
@ -728,6 +803,268 @@ public final class FileSystemPackerWorkspaceService implements PackerWorkspaceSe
|
||||
cacheRepository.save(project, snapshot.cacheState());
|
||||
}
|
||||
|
||||
private PackerRuntimeSnapshot createFrozenPackingSnapshot(PackerRuntimeSnapshot snapshot) {
|
||||
final PackerRuntimeAssetMaterializer materializer = new PackerRuntimeAssetMaterializer(new PackerAssetWalker(mapper));
|
||||
final List<PackerRuntimeAsset> assets = snapshot.assets().stream()
|
||||
.map(asset -> rematerializeForPacking(snapshot, asset, materializer))
|
||||
.toList();
|
||||
return new PackerRuntimeSnapshot(snapshot.generation(), snapshot.registry(), assets, snapshot.cacheState());
|
||||
}
|
||||
|
||||
private PackerRuntimeAsset rematerializeForPacking(
|
||||
PackerRuntimeSnapshot snapshot,
|
||||
PackerRuntimeAsset asset,
|
||||
PackerRuntimeAssetMaterializer materializer) {
|
||||
if (asset.registryEntry().isEmpty() || !asset.registryEntry().get().includedInBuild() || !asset.parsedDeclaration().valid()) {
|
||||
return asset;
|
||||
}
|
||||
return materializer.materialize(
|
||||
asset.assetRoot(),
|
||||
asset.manifestPath(),
|
||||
asset.registryEntry(),
|
||||
asset.parsedDeclaration(),
|
||||
asset.registryEntry().flatMap(entry -> snapshot.cacheState().findAsset(entry.assetId())),
|
||||
PackerRuntimeMaterializationConfig.packingBuild()).runtimeAsset();
|
||||
}
|
||||
|
||||
// Assembles the in-memory pack outputs from the frozen snapshot: the assets.pa byte
// stream (prelude + canonical JSON header + concatenated asset payloads) and the JSON
// sidecar structures (asset table, preload list, asset-table metadata).
private PackedWorkspaceArtifacts buildPackedWorkspaceArtifacts(PackerRuntimeSnapshot snapshot) throws IOException {
    // Only registered, build-included assets are packed, sorted by asset id so table
    // rows and payload offsets are deterministic across runs.
    final List<PackedAsset> packedAssets = snapshot.assets().stream()
        .filter(asset -> asset.registryEntry().isPresent() && asset.registryEntry().get().includedInBuild())
        .sorted(Comparator.comparingInt(asset -> asset.registryEntry().get().assetId()))
        .map(this::packRuntimeAsset)
        .toList();

    final List<Map<String, Object>> assetTable = new ArrayList<>();
    final List<Map<String, Object>> preload = new ArrayList<>();
    final List<Map<String, Object>> assetTableMetadata = new ArrayList<>();
    final ByteArrayOutputStream payload = new ByteArrayOutputStream();
    for (PackedAsset packedAsset : packedAssets) {
        // Offset is relative to the start of the payload section (after prelude+header),
        // captured before this asset's bytes are appended.
        final int offset = payload.size();
        payload.write(packedAsset.payload());
        assetTable.add(new LinkedHashMap<>(Map.of(
            "asset_id", packedAsset.assetId(),
            "asset_name", packedAsset.assetName(),
            "bank_type", packedAsset.bankType(),
            "offset", offset,
            "size", packedAsset.payload().length,
            "decoded_size", packedAsset.decodedSize(),
            "codec", packedAsset.codec(),
            "metadata", packedAsset.metadata())));
        if (packedAsset.preloadEnabled()) {
            preload.add(new LinkedHashMap<>(Map.of("asset_id", packedAsset.assetId())));
        }
        assetTableMetadata.add(new LinkedHashMap<>(Map.of(
            "asset_id", packedAsset.assetId(),
            "metadata", packedAsset.metadata())));
    }

    // Embedded header carries only asset_table and preload; asset_table_metadata is
    // emitted solely as a sidecar file. Canonical (key-sorted) JSON keeps the header
    // bytes stable for identical inputs.
    final byte[] headerBytes = canonicalJsonBytes(Map.of(
        "asset_table", assetTable,
        "preload", preload));
    final byte[] preludeBytes = buildPrelude(headerBytes.length);
    final ByteArrayOutputStream assetsPa = new ByteArrayOutputStream();
    assetsPa.write(preludeBytes);
    assetsPa.write(headerBytes);
    assetsPa.write(payload.toByteArray());
    return new PackedWorkspaceArtifacts(
        assetsPa.toByteArray(),
        assetTable,
        preload,
        assetTableMetadata,
        packedAssets.size());
}
|
||||
|
||||
private PackedAsset packRuntimeAsset(PackerRuntimeAsset runtimeAsset) {
|
||||
final PackerAssetDeclaration declaration = runtimeAsset.parsedDeclaration().declaration();
|
||||
if (declaration == null || declaration.assetFamily() != AssetFamilyCatalog.TILE_BANK) {
|
||||
throw new IllegalStateException("Unsupported pack output family for current implementation.");
|
||||
}
|
||||
return packTileBank(runtimeAsset, declaration);
|
||||
}
|
||||
|
||||
// Packs one tile-bank asset into its binary payload: a fixed 256x256 indexed-color
// sheet (4 bits per pixel after packNibbles) followed by the RGB565 palette block.
private PackedAsset packTileBank(PackerRuntimeAsset runtimeAsset, PackerAssetDeclaration declaration) {
    final int tileSize = parseTileSize(declaration.outputMetadata().get("tile_size"));
    // Sheet dimensions are hard-coded for now; tilesPerRow defines the grid layout.
    final int width = 256;
    final int height = 256;
    final int tilesPerRow = width / tileSize;
    // One byte per logical pixel while composing; packed to nibbles at the end.
    final byte[] sheetPixels = new byte[width * height];
    final Map<String, PackerRuntimeWalkFile> selectedByPath = new LinkedHashMap<>();
    runtimeAsset.walkProjection().buildCandidateFiles().forEach(file -> selectedByPath.put(file.relativePath(), file));

    // Blit each declared artifact into its grid cell, ordered by declared index.
    declaration.artifacts().stream()
        .sorted(Comparator.comparingInt(PackerAssetArtifactSelection::index))
        .forEach(artifact -> {
            final PackerRuntimeWalkFile walkFile = selectedByPath.get(artifact.file());
            if (walkFile == null) {
                throw new IllegalStateException("Selected tile artifact is missing from packing snapshot: " + artifact.file());
            }
            final Object tileValue = walkFile.metadata().get("tile");
            if (!(tileValue instanceof PackerTileIndexedV1 tile)) {
                throw new IllegalStateException("Selected tile artifact is missing normalized tile metadata: " + artifact.file());
            }
            // Top-left corner of this tile's cell within the sheet.
            final int tileX = (artifact.index() % tilesPerRow) * tileSize;
            final int tileY = (artifact.index() / tilesPerRow) * tileSize;
            // NOTE(review): copies tile.width() x tile.height() pixels without checking
            // they fit tileSize or the sheet bounds — an oversized tile or out-of-range
            // index would clobber neighbours or throw; confirm upstream validation.
            for (int localY = 0; localY < tile.height(); localY += 1) {
                for (int localX = 0; localX < tile.width(); localX += 1) {
                    final int sourceOffset = localY * tile.width() + localX;
                    final int targetOffset = (tileY + localY) * width + (tileX + localX);
                    sheetPixels[targetOffset] = tile.paletteIndices()[sourceOffset];
                }
            }
        });

    // Payload layout: nibble-packed pixel sheet, then the palette block.
    final byte[] packedPixels = packNibbles(sheetPixels);
    final byte[] paletteBytes = emitTileBankPalettes(declaration);
    final byte[] payload = new byte[packedPixels.length + paletteBytes.length];
    System.arraycopy(packedPixels, 0, payload, 0, packedPixels.length);
    System.arraycopy(paletteBytes, 0, payload, packedPixels.length, paletteBytes.length);

    // Asset-table metadata: normalized keys first, then any remaining declaration
    // output metadata that does not collide (tile_size is re-emitted as an int).
    final LinkedHashMap<String, Object> metadata = new LinkedHashMap<>();
    metadata.put("tile_size", tileSize);
    metadata.put("width", width);
    metadata.put("height", height);
    metadata.put("palette_count", 64);
    metadata.put("codec", Map.of());
    metadata.put("pipeline", PackerReadMessageMapper.normalizeMetadata(declaration.outputPipelineMetadata()));
    declaration.outputMetadata().forEach((key, value) -> {
        if (!"tile_size".equals(key)) {
            metadata.putIfAbsent(key, value);
        }
    });

    final PackerRegistryEntry registryEntry = runtimeAsset.registryEntry()
        .orElseThrow(() -> new IllegalStateException("Packed runtime asset must be registered"));
    return new PackedAsset(
        registryEntry.assetId(),
        declaration.name(),
        "TILES",
        "NONE",
        payload,
        // Decoded size: one byte per pixel plus 2048 palette bytes
        // (64 palettes x 16 colors x 2 bytes — matches emitTileBankPalettes).
        width * height + 2048,
        metadata,
        declaration.preloadEnabled());
}
|
||||
|
||||
private int parseTileSize(String value) {
|
||||
return switch (Objects.requireNonNullElse(value, "")) {
|
||||
case "8x8" -> 8;
|
||||
case "16x16" -> 16;
|
||||
case "32x32" -> 32;
|
||||
default -> throw new IllegalStateException("Unsupported tile_size for tile-bank packing: " + value);
|
||||
};
|
||||
}
|
||||
|
||||
private byte[] packNibbles(byte[] logicalPixels) {
|
||||
final byte[] packed = new byte[(logicalPixels.length + 1) / 2];
|
||||
for (int offset = 0; offset < logicalPixels.length; offset += 2) {
|
||||
final int high = logicalPixels[offset] & 0x0F;
|
||||
final int low = offset + 1 < logicalPixels.length ? logicalPixels[offset + 1] & 0x0F : 0;
|
||||
packed[offset / 2] = (byte) ((high << 4) | low);
|
||||
}
|
||||
return packed;
|
||||
}
|
||||
|
||||
private byte[] emitTileBankPalettes(PackerAssetDeclaration declaration) {
|
||||
final byte[] bytes = new byte[64 * 16 * 2];
|
||||
final JsonNode palettesNode = declaration.outputPipelineMetadata().get("palettes");
|
||||
if (!(palettesNode instanceof com.fasterxml.jackson.databind.node.ArrayNode palettesArray)) {
|
||||
return bytes;
|
||||
}
|
||||
for (JsonNode declarationNode : palettesArray) {
|
||||
final JsonNode indexNode = declarationNode.path("index");
|
||||
final JsonNode paletteNode = declarationNode.path("palette");
|
||||
if (!indexNode.isInt() || !paletteNode.isObject()) {
|
||||
continue;
|
||||
}
|
||||
final int paletteIndex = indexNode.intValue();
|
||||
if (paletteIndex < 0 || paletteIndex >= 64) {
|
||||
continue;
|
||||
}
|
||||
final JsonNode convertedNode = paletteNode.path("convertedRgb565");
|
||||
if (!convertedNode.isArray()) {
|
||||
continue;
|
||||
}
|
||||
for (int colorIndex = 0; colorIndex < Math.min(16, convertedNode.size()); colorIndex += 1) {
|
||||
final int rgb565 = convertedNode.get(colorIndex).asInt(0);
|
||||
final int baseOffset = ((paletteIndex * 16) + colorIndex) * 2;
|
||||
bytes[baseOffset] = (byte) (rgb565 & 0xFF);
|
||||
bytes[baseOffset + 1] = (byte) ((rgb565 >>> 8) & 0xFF);
|
||||
}
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
private byte[] buildPrelude(int headerLength) {
|
||||
final int preludeLength = 24;
|
||||
final ByteBuffer buffer = ByteBuffer.allocate(preludeLength).order(ByteOrder.LITTLE_ENDIAN);
|
||||
buffer.put((byte) 'P').put((byte) 'P').put((byte) 'A').put((byte) 'K');
|
||||
buffer.putInt(1);
|
||||
buffer.putInt(headerLength);
|
||||
buffer.putInt(preludeLength + headerLength);
|
||||
buffer.putInt(0);
|
||||
buffer.putInt(0);
|
||||
return buffer.array();
|
||||
}
|
||||
|
||||
private byte[] canonicalJsonBytes(Object value) throws IOException {
|
||||
final Object canonical = canonicalizeJsonValue(value);
|
||||
return mapper.copy()
|
||||
.configure(com.fasterxml.jackson.databind.SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true)
|
||||
.writeValueAsBytes(canonical);
|
||||
}
|
||||
|
||||
private Object canonicalizeJsonValue(Object value) {
|
||||
if (value instanceof Map<?, ?> map) {
|
||||
final LinkedHashMap<String, Object> sorted = new LinkedHashMap<>();
|
||||
map.entrySet().stream()
|
||||
.sorted(Comparator.comparing(entry -> String.valueOf(entry.getKey())))
|
||||
.forEach(entry -> sorted.put(String.valueOf(entry.getKey()), canonicalizeJsonValue(entry.getValue())));
|
||||
return sorted;
|
||||
}
|
||||
if (value instanceof List<?> list) {
|
||||
return list.stream().map(this::canonicalizeJsonValue).toList();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
private void writeJson(Path path, Object value) throws IOException {
|
||||
Files.createDirectories(path.getParent());
|
||||
mapper.writerWithDefaultPrettyPrinter().writeValue(path.toFile(), value);
|
||||
}
|
||||
|
||||
private PackerEmittedArtifactDTO emittedArtifact(String label, Path path, boolean canonical) throws IOException {
|
||||
return new PackerEmittedArtifactDTO(label, path, canonical, Files.size(path));
|
||||
}
|
||||
|
||||
private String randomOperationId() {
|
||||
final String alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
|
||||
final StringBuilder builder = new StringBuilder(20);
|
||||
final Random random = new Random();
|
||||
for (int index = 0; index < 20; index += 1) {
|
||||
builder.append(alphabet.charAt(random.nextInt(alphabet.length())));
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
/**
 * One asset's packed form: its identity, payload bytes, and the metadata row that
 * feeds the asset table.
 *
 * <p>NOTE(review): {@code payload} is an array held without a defensive copy — the
 * record exposes the live reference, so callers must not mutate it after construction.
 * {@code decodedSize} is the unpacked byte count reported in the asset table;
 * {@code bankType}/{@code codec} are the string labels written to the table
 * ("TILES"/"NONE" in the current tile-bank path).
 */
private record PackedAsset(
    int assetId,
    String assetName,
    String bankType,
    String codec,
    byte[] payload,
    int decodedSize,
    Map<String, Object> metadata,
    boolean preloadEnabled) {
}
|
||||
|
||||
/**
 * The complete in-memory result of packing a workspace: the assets.pa byte stream plus
 * the three JSON sidecar structures (asset table, preload list, asset-table metadata)
 * and the number of assets that were packed.
 *
 * <p>NOTE(review): {@code assetsPaBytes} and the lists are held without defensive
 * copies; treat instances as effectively immutable after construction.
 */
private record PackedWorkspaceArtifacts(
    byte[] assetsPaBytes,
    List<Map<String, Object>> assetTable,
    List<Map<String, Object>> preload,
    List<Map<String, Object>> assetTableMetadata,
    int packedAssetCount) {
}
|
||||
|
||||
private PackerRegistryState removeRegistryEntry(
|
||||
PackerRegistryState registry,
|
||||
PackerRegistryEntry entry) {
|
||||
|
||||
@ -220,6 +220,80 @@ final class FileSystemPackerWorkspaceServiceTest {
|
||||
.anyMatch(diagnostic -> diagnostic.message().contains("must declare at least one palette")));
|
||||
}
|
||||
|
||||
// Verifies that packWorkspace re-runs the diagnostics gate on a *fresh* snapshot: the
// workspace validates cleanly first, is then mutated on disk into a blocking state,
// and the subsequent pack must detect the new problem and emit nothing.
@Test
void packWorkspaceRerunsGateOnFreshSnapshotBeforeEmission() throws Exception {
    final Path projectRoot = copyFixture("workspaces/managed-basic", tempDir.resolve("pack-workspace-rerun-gate"));
    final Path assetRoot = projectRoot.resolve("assets/ui/atlas");
    final Path manifestPath = assetRoot.resolve("asset.json");
    final FileSystemPackerWorkspaceService service = service();

    // Baseline: the untouched fixture passes pack validation.
    final var validation = service.validatePackWorkspace(new ValidatePackWorkspaceRequest(project(projectRoot)));
    assertEquals(PackerOperationStatus.SUCCESS, validation.status());

    // Mutate the workspace AFTER validation: add a tile artifact to the manifest
    // (the fixture presumably lacks the palette declaration this requires, making the
    // asset blocking — TODO confirm against the fixture contents).
    writeTilePng(assetRoot.resolve("confirm.png"), 16);
    final ObjectNode manifest = (ObjectNode) MAPPER.readTree(manifestPath.toFile());
    final var artifacts = manifest.putArray("artifacts");
    artifacts.addObject().put("file", "confirm.png").put("index", 0);
    MAPPER.writerWithDefaultPrettyPrinter().writeValue(manifestPath.toFile(), manifest);

    final var result = service.packWorkspace(new PackWorkspaceRequest(project(projectRoot)));

    // The stale SUCCESS validation must not be reused: pack reports PARTIAL and
    // writes no artifacts at all.
    assertEquals(PackerOperationStatus.PARTIAL, result.status());
    assertEquals("assets.pa", result.result().canonicalArtifactName());
    assertTrue(result.result().emittedArtifacts().isEmpty());
    assertFalse(Files.exists(projectRoot.resolve("build/assets.pa")));
}
|
||||
|
||||
// Happy-path pack: a fixture workspace is given one valid 16x16 tile artifact plus a
// palette, then packed. Asserts all four build artifacts exist and that the asset
// table row carries the expected normalized tile-bank metadata.
@Test
void packWorkspaceEmitsTileBankArtifactsFromFrozenSnapshot() throws Exception {
    final Path projectRoot = copyFixture("workspaces/managed-basic", tempDir.resolve("pack-workspace-success"));
    final Path assetRoot = projectRoot.resolve("assets/ui/atlas");
    final Path manifestPath = assetRoot.resolve("asset.json");
    writeTilePng(assetRoot.resolve("confirm.png"), 16);

    // Build a minimal valid manifest: tile_size, one palette (index 0, a single red
    // RGB565 color), and one artifact at grid index 0.
    final ObjectNode manifest = (ObjectNode) MAPPER.readTree(manifestPath.toFile());
    final ObjectNode output = (ObjectNode) manifest.path("output");
    output.putObject("metadata").put("tile_size", "16x16");
    final ObjectNode pipeline = output.putObject("pipeline");
    final var palettes = pipeline.putArray("palettes");
    final ObjectNode palette = palettes.addObject();
    palette.put("index", 0);
    palette.putObject("palette")
        .putArray("originalArgb8888").add(0xFFFF0000);
    ((ObjectNode) palette.path("palette"))
        .putArray("convertedRgb565").add(0xF800);
    final var artifacts = manifest.putArray("artifacts");
    artifacts.addObject().put("file", "confirm.png").put("index", 0);
    MAPPER.writerWithDefaultPrettyPrinter().writeValue(manifestPath.toFile(), manifest);

    final FileSystemPackerWorkspaceService service = service();
    final var result = service.packWorkspace(new PackWorkspaceRequest(project(projectRoot)));

    // Pack succeeds and reports exactly the four emitted build files.
    assertEquals(PackerOperationStatus.SUCCESS, result.status());
    assertEquals("assets.pa", result.result().canonicalArtifactName());
    assertEquals(1, result.result().packedAssetCount());
    assertEquals(4, result.result().emittedArtifacts().size());
    assertTrue(Files.isRegularFile(projectRoot.resolve("build/assets.pa")));
    assertTrue(Files.isRegularFile(projectRoot.resolve("build/asset_table.json")));
    assertTrue(Files.isRegularFile(projectRoot.resolve("build/preload.json")));
    assertTrue(Files.isRegularFile(projectRoot.resolve("build/asset_table_metadata.json")));

    // The sidecar asset table holds one row with the normalized tile-bank metadata.
    final var assetTable = MAPPER.readTree(projectRoot.resolve("build/asset_table.json").toFile());
    assertEquals(1, assetTable.size());
    assertEquals(1, assetTable.get(0).path("asset_id").asInt());
    assertEquals("TILES", assetTable.get(0).path("bank_type").asText());
    assertEquals("NONE", assetTable.get(0).path("codec").asText());
    assertEquals(16, assetTable.get(0).path("metadata").path("tile_size").asInt());
    assertEquals(256, assetTable.get(0).path("metadata").path("width").asInt());
    assertEquals(256, assetTable.get(0).path("metadata").path("height").asInt());
    assertEquals(64, assetTable.get(0).path("metadata").path("palette_count").asInt());
    assertTrue(assetTable.get(0).path("metadata").path("pipeline").path("palettes").isArray());

    // The asset is preload-enabled, so it appears in the preload list.
    final var preload = MAPPER.readTree(projectRoot.resolve("build/preload.json").toFile());
    assertEquals(1, preload.size());
    assertEquals(1, preload.get(0).path("asset_id").asInt());
}
|
||||
|
||||
@Test
|
||||
void packValidationIncludesBlockingFileScopedTileDiagnostics() throws Exception {
|
||||
final Path projectRoot = copyFixture("workspaces/managed-basic", tempDir.resolve("pack-validation-file-diagnostics"));
|
||||
|
||||
@ -99,7 +99,9 @@ final class PackerAssetDetailsServiceTest {
|
||||
final PackerAssetDetailsService service = service();
|
||||
final var result = service.getAssetDetails(new GetAssetDetailsRequest(project(projectRoot), AssetReference.forAssetId(1)));
|
||||
|
||||
assertEquals(PackerOperationStatus.SUCCESS, result.status());
|
||||
assertEquals(PackerOperationStatus.PARTIAL, result.status());
|
||||
assertTrue(result.diagnostics().stream()
|
||||
.anyMatch(diagnostic -> diagnostic.message().contains("must declare at least one palette")));
|
||||
assertTrue(result.details().bankComposition().availableFiles().stream()
|
||||
.anyMatch(file -> file.path().equals("confirm.png")));
|
||||
assertTrue(result.details().bankComposition().selectedFiles().stream()
|
||||
@ -131,7 +133,9 @@ final class PackerAssetDetailsServiceTest {
|
||||
final PackerAssetDetailsService service = service();
|
||||
final var result = service.getAssetDetails(new GetAssetDetailsRequest(project(projectRoot), AssetReference.forAssetId(1)));
|
||||
|
||||
assertEquals(PackerOperationStatus.SUCCESS, result.status());
|
||||
assertEquals(PackerOperationStatus.PARTIAL, result.status());
|
||||
assertTrue(result.diagnostics().stream()
|
||||
.anyMatch(diagnostic -> diagnostic.message().contains("must declare at least one palette")));
|
||||
assertEquals(
|
||||
List.of("b.png", "a.png"),
|
||||
result.details().bankComposition().selectedFiles().stream().map(file -> file.path()).toList());
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user