From 6956ea730722c30113cb780bdc55523a6de8e946 Mon Sep 17 00:00:00 2001 From: Esta Nagy Date: Wed, 19 Jun 2024 23:38:23 +0200 Subject: [PATCH] Improve archive index format (#269) - Defines V2 archive index properties - Implements parsers for both archive index versions - Uses the parsers instead of direct properties access - Adds new tests Resolves #248 {minor} Signed-off-by: Esta Nagy --- ...BarjCargoArchiveFileInputStreamSource.java | 69 ++++++++------ .../BarjCargoArchiverFileOutputStream.java | 17 ++-- .../filebarj/io/stream/BarjCargoUtil.java | 20 ----- .../filebarj/io/stream/IndexVersion.java | 56 ++++++++++++ .../io/stream/ReadOnlyArchiveIndex.java | 26 ++++++ .../io/stream/index/ArchiveIndexV1.java | 41 +++++++++ .../io/stream/index/ArchiveIndexV2.java | 70 +++++++++++++++ .../filebarj/io/stream/IndexVersionTest.java | 80 +++++++++++++++++ ...chiverFileOutputStreamIntegrationTest.java | 2 +- .../resources/example/index_v2.properties | 89 +++++++++++++++++++ 10 files changed, 415 insertions(+), 55 deletions(-) create mode 100644 file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java create mode 100644 file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ReadOnlyArchiveIndex.java create mode 100644 file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java create mode 100644 file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java create mode 100644 file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/IndexVersionTest.java create mode 100644 file-barj-stream-io/src/test/resources/example/index_v2.properties diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java index 9f4fff3..8c5b552 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java @@ -28,7 +28,9 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; -import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.*; +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.toChunkFileName; +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.toIndexFileName; +import static com.github.nagyesta.filebarj.io.stream.ReadOnlyArchiveIndex.INDEX_VERSION; import static com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil.newCipherInputStream; import static com.github.nagyesta.filebarj.io.stream.internal.ChunkingOutputStream.MEBIBYTE; import static org.apache.commons.io.FilenameUtils.normalizeNoEndSeparator; @@ -133,6 +135,7 @@ public BarjCargoArchiveEntryIterator getIteratorForScope( /** * Returns the matching entries in order of occurrence in the archive. 
+ * * @param archiveEntriesInScope the entries in scope * @return the matching entries */ @@ -298,10 +301,10 @@ protected Properties readProperties( @NotNull protected List parseEntityIndexes( @NotNull final Properties properties) { - final var totalEntities = Long.parseLong(properties.getProperty(LAST_ENTITY_INDEX_PROPERTY)); - return LongStream.rangeClosed(1L, totalEntities) + final var index = parse(properties); + return LongStream.rangeClosed(1L, index.getTotalEntities()) .mapToObj(BarjCargoUtil::entryIndexPrefix) - .map(prefix -> BarjCargoEntityIndex.fromProperties(properties, prefix)) + .map(index::entity) .toList(); } @@ -316,10 +319,10 @@ protected List parseEntityIndexes( protected SortedMap generateFilePathMap( @NotNull final Properties properties, @NotNull final BarjCargoInputStreamConfiguration config) { - final var totalChunks = Integer.parseInt(properties.getProperty(LAST_CHUNK_INDEX_PROPERTY)); + final var index = parse(properties); final var map = new TreeMap(); - IntStream.rangeClosed(1, totalChunks) - .mapToObj(index -> toChunkFileName(config.getPrefix(), index)) + IntStream.rangeClosed(1, index.getNumberOfChunks()) + .mapToObj(i -> toChunkFileName(config.getPrefix(), i)) .map(p -> Path.of(config.getFolder().toAbsolutePath().toString(), p)) .map(Path::toAbsolutePath) .forEach(path -> map.put(path.getFileName().toString(), path)); @@ -336,35 +339,45 @@ protected SortedMap generateFilePathMap( protected void verifyFilesExistAndHaveExpectedSizes( @NotNull final Properties properties, @NotNull final SortedMap chunkPaths) throws ArchiveIntegrityException { - final var maxChunkSize = Long.parseLong(properties.getProperty(MAX_CHUNK_SIZE_PROPERTY)); - final var lastChunkSize = Long.parseLong(properties.getProperty(LAST_CHUNK_SIZE_PROPERTY)); - final var expectedTotalSize = Long.parseLong(properties.getProperty(TOTAL_SIZE_PROPERTY)); + final var index = parse(properties); var totalSize = 0L; final var iterator = chunkPaths.keySet().iterator(); while (iterator.hasNext()) { final var key = iterator.next(); final var path = chunkPaths.get(key); - final var file = path.toFile(); - if (!file.exists()) { - throw new ArchiveIntegrityException("Chunk file does not exist: " + path); - } - final long expectedSize; - if (iterator.hasNext()) { - expectedSize = maxChunkSize; - } else { - expectedSize = lastChunkSize; - } - final var fileSize = file.length(); - if (expectedSize != fileSize) { - throw new ArchiveIntegrityException("Chunk file size is wrong: " + path - + ", expected: " + expectedSize + " bytes, actual: " + fileSize + " bytes."); - } - totalSize += fileSize; + totalSize += verifiedFileSize(index, path, iterator.hasNext()); } - if (totalSize != expectedTotalSize) { + if (totalSize != index.getTotalSize()) { throw new ArchiveIntegrityException( - "Total size is wrong: " + totalSize + " bytes, expected: " + expectedTotalSize + " bytes."); + "Total size is wrong: " + totalSize + " bytes, expected: " + index.getTotalSize() + " bytes."); + } + } + + private static long verifiedFileSize( + final ReadOnlyArchiveIndex index, + final Path path, + final boolean isNotLast) { + final var file = path.toFile(); + if (!file.exists()) { + throw new ArchiveIntegrityException("Chunk file does not exist: " + path); + } + final long expectedSize; + if (isNotLast) { + expectedSize = index.getMaxChunkSizeInBytes(); + } else { + expectedSize = index.getLastChunkSizeInBytes(); } + final var fileSize = file.length(); + if (expectedSize != fileSize) { + throw new ArchiveIntegrityException("Chunk file size 
is wrong: " + path + + ", expected: " + expectedSize + " bytes, actual: " + fileSize + " bytes."); + } + return fileSize; + } + + private static ReadOnlyArchiveIndex parse(final Properties properties) { + return IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)) + .createIndex(properties); } private void validateEntityIndexes( diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java index 0aca8fe..a78f858 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java @@ -1,5 +1,6 @@ package com.github.nagyesta.filebarj.io.stream; +import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV2; import com.github.nagyesta.filebarj.io.stream.internal.BaseBarjCargoArchiverFileOutputStream; import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; import lombok.extern.slf4j.Slf4j; @@ -11,7 +12,8 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Path; -import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.*; +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.entryIndexPrefix; +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.toIndexFileName; import static com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil.newCipherOutputStream; /** @@ -100,11 +102,14 @@ private void writeIndexFileHeader() throws IOException { private void writeIndexFileFooter() throws IOException { final var lastChunk = getCurrentFilePath(); - final var footer = LAST_CHUNK_INDEX_PROPERTY + COLON + getCurrentChunkIndex() + LINE_BREAK - + LAST_CHUNK_SIZE_PROPERTY + COLON + lastChunk.toFile().length() + LINE_BREAK - + MAX_CHUNK_SIZE_PROPERTY + COLON + getMaxChunkSizeBytes() + LINE_BREAK - + LAST_ENTITY_INDEX_PROPERTY + COLON + entryCount() + LINE_BREAK - + TOTAL_SIZE_PROPERTY + COLON + getTotalByteCount() + LINE_BREAK; + final var footer = ArchiveIndexV2.builder() + .numberOfChunks(getCurrentChunkIndex()) + .lastChunkSizeInBytes(lastChunk.toFile().length()) + .maxChunkSizeInBytes(getMaxChunkSizeBytes()) + .totalEntities(entryCount()) + .totalSize(getTotalByteCount()) + .build() + .footerAsString(); indexStreamWriter.write(footer); } } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java index 78a0866..bf3d179 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java @@ -17,26 +17,6 @@ public final class BarjCargoUtil { * The file name suffix for index files. */ public static final String INDEX = ".index"; - /** - * The name of the property storing the last entity index. - */ - public static final String LAST_ENTITY_INDEX_PROPERTY = "last.entity.index"; - /** - * The name of the property storing the index of the last chunk. - */ - public static final String LAST_CHUNK_INDEX_PROPERTY = "last.cnunk.index"; - /** - * The name of the property storing the size of the last chunk. 
- */ - public static final String LAST_CHUNK_SIZE_PROPERTY = "last.cnunk.size"; - /** - * The name of the property storing the maximum chunk size. - */ - public static final String MAX_CHUNK_SIZE_PROPERTY = "max.cnunk.size"; - /** - * The name of the property storing the total size of the archive. - */ - public static final String TOTAL_SIZE_PROPERTY = "total.size"; /** * A colon character used for separating the key and value in properties. */ diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java new file mode 100644 index 0000000..f174355 --- /dev/null +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java @@ -0,0 +1,56 @@ +package com.github.nagyesta.filebarj.io.stream; + +import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV1; +import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV2; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; + +import java.util.Properties; + +/** + * The version of the File Barj index specification. + */ +@Getter +public enum IndexVersion { + /** + * The initial version of the File Barj index specification. + */ + V1("1") { + @Override + ReadOnlyArchiveIndex createIndex(@NotNull final Properties properties) { + return new ArchiveIndexV1(properties); + } + }, + /** + * The 2nd version of the File Barj index specification. + */ + V2("2") { + @Override + ReadOnlyArchiveIndex createIndex(@NotNull final Properties properties) { + return new ArchiveIndexV2(properties); + } + }; + + private final String version; + + IndexVersion(final String version) { + this.version = version; + } + + public static IndexVersion forVersionString(final String version) { + for (final var indexVersion : values()) { + if (indexVersion.version.equals(version)) { + return indexVersion; + } + } + return V1; + } + + /** + * Instantiates a read-only archive index from the given properties. + * + * @param properties the properties + * @return the read-only archive index + */ + abstract ReadOnlyArchiveIndex createIndex(@NotNull Properties properties); +} diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ReadOnlyArchiveIndex.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ReadOnlyArchiveIndex.java new file mode 100644 index 0000000..025f7e2 --- /dev/null +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ReadOnlyArchiveIndex.java @@ -0,0 +1,26 @@ +package com.github.nagyesta.filebarj.io.stream; + +import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; + +/** + * A read-only representation of an archive index file. + */ +public interface ReadOnlyArchiveIndex { + + /** + * The name of the property that contains the version of the index specification. 
+ */ + String INDEX_VERSION = "version"; + + int getNumberOfChunks(); + + long getMaxChunkSizeInBytes(); + + long getLastChunkSizeInBytes(); + + long getTotalSize(); + + long getTotalEntities(); + + BarjCargoEntityIndex entity(String prefix); +} diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java new file mode 100644 index 0000000..db06326 --- /dev/null +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java @@ -0,0 +1,41 @@ +package com.github.nagyesta.filebarj.io.stream.index; + +import com.github.nagyesta.filebarj.io.stream.IndexVersion; +import com.github.nagyesta.filebarj.io.stream.ReadOnlyArchiveIndex; +import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; + +import java.util.Properties; + +@Getter +public class ArchiveIndexV1 implements ReadOnlyArchiveIndex { + + private static final String LAST_ENTITY_INDEX_PROPERTY = "last.entity.index"; + private static final String LAST_CHUNK_INDEX_PROPERTY = "last.cnunk.index"; + private static final String LAST_CHUNK_SIZE_PROPERTY = "last.cnunk.size"; + private static final String MAX_CHUNK_SIZE_PROPERTY = "max.cnunk.size"; + private static final String TOTAL_SIZE_PROPERTY = "total.size"; + private final Properties properties; + private final IndexVersion indexVersion; + private final long totalEntities; + private final int numberOfChunks; + private final long maxChunkSizeInBytes; + private final long lastChunkSizeInBytes; + private final long totalSize; + + public ArchiveIndexV1(@NotNull final Properties properties) { + this.properties = properties; + this.indexVersion = IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)); + this.totalEntities = Long.parseLong(properties.getProperty(LAST_ENTITY_INDEX_PROPERTY)); + this.numberOfChunks = Integer.parseInt(properties.getProperty(LAST_CHUNK_INDEX_PROPERTY)); + this.maxChunkSizeInBytes = Long.parseLong(properties.getProperty(MAX_CHUNK_SIZE_PROPERTY)); + this.lastChunkSizeInBytes = Long.parseLong(properties.getProperty(LAST_CHUNK_SIZE_PROPERTY)); + this.totalSize = Long.parseLong(properties.getProperty(TOTAL_SIZE_PROPERTY)); + } + + @Override + public BarjCargoEntityIndex entity(@NotNull final String prefix) { + return BarjCargoEntityIndex.fromProperties(properties, prefix); + } +} diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java new file mode 100644 index 0000000..18725af --- /dev/null +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java @@ -0,0 +1,70 @@ +package com.github.nagyesta.filebarj.io.stream.index; + +import com.github.nagyesta.filebarj.io.stream.IndexVersion; +import com.github.nagyesta.filebarj.io.stream.ReadOnlyArchiveIndex; +import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; +import lombok.Builder; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; + +import java.util.Properties; + +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.COLON; +import static com.github.nagyesta.filebarj.io.stream.BarjCargoUtil.LINE_BREAK; + +@Getter +public class ArchiveIndexV2 implements ReadOnlyArchiveIndex { + + 
private static final String LAST_ENTITY_INDEX_PROPERTY = "last.entity.index"; + private static final String LAST_CHUNK_INDEX_PROPERTY = "last.chunk.index"; + private static final String LAST_CHUNK_SIZE_PROPERTY = "last.chunk.size"; + private static final String MAX_CHUNK_SIZE_PROPERTY = "max.chunk.size"; + private static final String TOTAL_SIZE_PROPERTY = "total.size"; + private final Properties properties; + private final IndexVersion indexVersion; + private final long totalEntities; + private final int numberOfChunks; + private final long maxChunkSizeInBytes; + private final long lastChunkSizeInBytes; + private final long totalSize; + + public ArchiveIndexV2(@NotNull final Properties properties) { + this.properties = properties; + this.indexVersion = IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)); + this.totalEntities = Long.parseLong(properties.getProperty(LAST_ENTITY_INDEX_PROPERTY)); + this.numberOfChunks = Integer.parseInt(properties.getProperty(LAST_CHUNK_INDEX_PROPERTY)); + this.maxChunkSizeInBytes = Long.parseLong(properties.getProperty(MAX_CHUNK_SIZE_PROPERTY)); + this.lastChunkSizeInBytes = Long.parseLong(properties.getProperty(LAST_CHUNK_SIZE_PROPERTY)); + this.totalSize = Long.parseLong(properties.getProperty(TOTAL_SIZE_PROPERTY)); + } + + @Builder + public ArchiveIndexV2( + final long totalSize, + final long lastChunkSizeInBytes, + final long maxChunkSizeInBytes, + final int numberOfChunks, + final long totalEntities) { + this.indexVersion = IndexVersion.V2; + this.properties = null; + this.totalSize = totalSize; + this.lastChunkSizeInBytes = lastChunkSizeInBytes; + this.maxChunkSizeInBytes = maxChunkSizeInBytes; + this.numberOfChunks = numberOfChunks; + this.totalEntities = totalEntities; + } + + @Override + public BarjCargoEntityIndex entity(@NotNull final String prefix) { + return BarjCargoEntityIndex.fromProperties(properties, prefix); + } + + public String footerAsString() { + return LAST_CHUNK_INDEX_PROPERTY + COLON + numberOfChunks + LINE_BREAK + + LAST_CHUNK_SIZE_PROPERTY + COLON + lastChunkSizeInBytes + LINE_BREAK + + MAX_CHUNK_SIZE_PROPERTY + COLON + maxChunkSizeInBytes + LINE_BREAK + + LAST_ENTITY_INDEX_PROPERTY + COLON + totalEntities + LINE_BREAK + + TOTAL_SIZE_PROPERTY + COLON + totalSize + LINE_BREAK + + INDEX_VERSION + COLON + indexVersion.getVersion() + LINE_BREAK; + } +} diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/IndexVersionTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/IndexVersionTest.java new file mode 100644 index 0000000..7cc41a6 --- /dev/null +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/IndexVersionTest.java @@ -0,0 +1,80 @@ +package com.github.nagyesta.filebarj.io.stream; + +import com.github.nagyesta.filebarj.io.TempFileAwareTest; +import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV1; +import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV2; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Properties; +import java.util.stream.Stream; + +import static com.github.nagyesta.filebarj.io.stream.ReadOnlyArchiveIndex.INDEX_VERSION; +import static java.nio.file.StandardOpenOption.READ; + +class 
IndexVersionTest extends TempFileAwareTest { + + private static final int EXPECTED_CHUNKS = 1; + private static final int EXPECTED_MAX_CHUNK_SIZE = 1048576; + private static final int EXPECTED_SIZE = 174; + private static final int EXPECTED_ENTITIES = 4; + + public Stream indexVersionProvider() { + return Stream.builder() + .add(Arguments.of(null, IndexVersion.V1)) + .add(Arguments.of("1", IndexVersion.V1)) + .add(Arguments.of("2", IndexVersion.V2)) + .build(); + } + + public Stream indexFileProvider() { + return Stream.builder() + .add(Arguments.of("/example/index.properties", ArchiveIndexV1.class)) + .add(Arguments.of("/example/index_v2.properties", ArchiveIndexV2.class)) + .build(); + } + + @ParameterizedTest + @MethodSource("indexVersionProvider") + void testForVersionStringShouldReturnExpectedVersionWhenCalledWithValidInput( + final String input, + final IndexVersion expected) { + //given + + //when + final var actual = IndexVersion.forVersionString(input); + + //then + Assertions.assertEquals(expected, actual); + } + + @SuppressWarnings("DataFlowIssue") + @ParameterizedTest + @MethodSource("indexFileProvider") + void testCreateIndexShouldParsePropertiesFilesUsingTheTypeRepresentingTheVersionWhenCalled( + final String input, + final Class expectedType) throws URISyntaxException, IOException { + //given + final var path = Path.of(getClass().getResource(input).toURI()); + final var properties = new Properties(); + properties.load(Files.newInputStream(path, READ)); + final var underTest = IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)); + + //when + final var actual = underTest.createIndex(properties); + + //then + Assertions.assertEquals(expectedType, actual.getClass()); + Assertions.assertEquals(EXPECTED_CHUNKS, actual.getNumberOfChunks()); + Assertions.assertEquals(EXPECTED_MAX_CHUNK_SIZE, actual.getMaxChunkSizeInBytes()); + Assertions.assertEquals(EXPECTED_SIZE, actual.getLastChunkSizeInBytes()); + Assertions.assertEquals(EXPECTED_SIZE, actual.getTotalSize()); + Assertions.assertEquals(EXPECTED_ENTITIES, actual.getTotalEntities()); + } +} diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java index 00aff62..f8aac2e 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java @@ -709,7 +709,7 @@ void testWritingExampleFileShouldProduceExpectedOutput() throws IOException, URI //then final var actualIndexLines = Files.readAllLines(stream.getIndexFileWritten()); final var expectedIndexLines = Files.readAllLines( - Path.of(getClass().getResource("/example/index.properties").toURI())); + Path.of(getClass().getResource("/example/index_v2.properties").toURI())); Assertions.assertIterableEquals(expectedIndexLines, actualIndexLines); final var actualCargoLines = Files.readAllLines(stream.getDataFilesWritten().get(0)); final var expectedCargoLines = Files.readAllLines( diff --git a/file-barj-stream-io/src/test/resources/example/index_v2.properties b/file-barj-stream-io/src/test/resources/example/index_v2.properties new file mode 100644 index 0000000..c99f99e --- /dev/null +++ 
b/file-barj-stream-io/src/test/resources/example/index_v2.properties @@ -0,0 +1,89 @@ +# File BaRJ Cargo Archive Index +00000001.path:/dir +00000001.type:DIRECTORY +00000001.encrypt:false +00000001.metadata.rel.start.idx:0 +00000001.metadata.rel.start.file:barj.00001.cargo +00000001.metadata.rel.end.idx:26 +00000001.metadata.rel.end.file:barj.00001.cargo +00000001.metadata.abs.start.idx:0 +00000001.metadata.abs.end.idx:26 +00000001.metadata.orig.size:26 +00000001.metadata.orig.hash:b5ba71cbf60c724f2ac86f1f818c60afae9d70fc29919614f05163c8748ff289 +00000001.metadata.arch.size:26 +00000001.metadata.arch.hash:b5ba71cbf60c724f2ac86f1f818c60afae9d70fc29919614f05163c8748ff289 +00000002.path:/dir/file1.ext +00000002.type:REGULAR_FILE +00000002.encrypt:false +00000002.content.rel.start.idx:26 +00000002.content.rel.start.file:barj.00001.cargo +00000002.content.rel.end.idx:39 +00000002.content.rel.end.file:barj.00001.cargo +00000002.content.abs.start.idx:26 +00000002.content.abs.end.idx:39 +00000002.content.orig.size:13 +00000002.content.orig.hash:1705789d380ee110bc09231df8af42a0cc564a1510ebd2168516d4985c40a263 +00000002.content.arch.size:13 +00000002.content.arch.hash:1705789d380ee110bc09231df8af42a0cc564a1510ebd2168516d4985c40a263 +00000002.metadata.rel.start.idx:39 +00000002.metadata.rel.start.file:barj.00001.cargo +00000002.metadata.rel.end.idx:75 +00000002.metadata.rel.end.file:barj.00001.cargo +00000002.metadata.abs.start.idx:39 +00000002.metadata.abs.end.idx:75 +00000002.metadata.orig.size:36 +00000002.metadata.orig.hash:8467fa2fb7ed6ac909285591309c882f7106ebde3c4d44d7342d11e303281810 +00000002.metadata.arch.size:36 +00000002.metadata.arch.hash:8467fa2fb7ed6ac909285591309c882f7106ebde3c4d44d7342d11e303281810 +00000003.path:/dir/file2.ext +00000003.type:SYMBOLIC_LINK +00000003.encrypt:false +00000003.content.rel.start.idx:75 +00000003.content.rel.start.file:barj.00001.cargo +00000003.content.rel.end.idx:89 +00000003.content.rel.end.file:barj.00001.cargo +00000003.content.abs.start.idx:75 +00000003.content.abs.end.idx:89 +00000003.content.orig.size:14 +00000003.content.orig.hash:8267a191a3ec453d93ea3db85d35f6f32ea74df7a192000a6df9ffb87990b36c +00000003.content.arch.size:14 +00000003.content.arch.hash:8267a191a3ec453d93ea3db85d35f6f32ea74df7a192000a6df9ffb87990b36c +00000003.metadata.rel.start.idx:89 +00000003.metadata.rel.start.file:barj.00001.cargo +00000003.metadata.rel.end.idx:125 +00000003.metadata.rel.end.file:barj.00001.cargo +00000003.metadata.abs.start.idx:89 +00000003.metadata.abs.end.idx:125 +00000003.metadata.orig.size:36 +00000003.metadata.orig.hash:9cd0d00d3de91c86b6dbcc9f4aa47c08127c55db6c93947f3be745a374f597dd +00000003.metadata.arch.size:36 +00000003.metadata.arch.hash:9cd0d00d3de91c86b6dbcc9f4aa47c08127c55db6c93947f3be745a374f597dd +00000004.path:/dir/file3.ext +00000004.type:REGULAR_FILE +00000004.encrypt:false +00000004.content.rel.start.idx:125 +00000004.content.rel.start.file:barj.00001.cargo +00000004.content.rel.end.idx:138 +00000004.content.rel.end.file:barj.00001.cargo +00000004.content.abs.start.idx:125 +00000004.content.abs.end.idx:138 +00000004.content.orig.size:13 +00000004.content.orig.hash:ea290f34cbd4a8143959aa8f89bb4a37b4de278d456b5e930320c50b30cea636 +00000004.content.arch.size:13 +00000004.content.arch.hash:ea290f34cbd4a8143959aa8f89bb4a37b4de278d456b5e930320c50b30cea636 +00000004.metadata.rel.start.idx:138 +00000004.metadata.rel.start.file:barj.00001.cargo +00000004.metadata.rel.end.idx:174 +00000004.metadata.rel.end.file:barj.00001.cargo 
+00000004.metadata.abs.start.idx:138 +00000004.metadata.abs.end.idx:174 +00000004.metadata.orig.size:36 +00000004.metadata.orig.hash:bc44ce95ea353244949dc3da703a68f0c49c3860f5652261033db22d194e6b55 +00000004.metadata.arch.size:36 +00000004.metadata.arch.hash:bc44ce95ea353244949dc3da703a68f0c49c3860f5652261033db22d194e6b55 +last.chunk.index:1 +last.chunk.size:174 +max.chunk.size:1048576 +last.entity.index:4 +total.size:174 +version:2
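
A minimal usage sketch to accompany the change (illustrative only, not part of the patch): the archiver now emits the V2 footer through ArchiveIndexV2.builder()...footerAsString(), and readers let IndexVersion.forVersionString(...) select the matching parser, falling back to the V1 property names (with the legacy "cnunk" spellings) when the "version" property is absent. The IndexRoundTripExample class and the literal chunk/size values below are made up for demonstration; since createIndex(Properties) is package-private, such a helper would have to live in the com.github.nagyesta.filebarj.io.stream package.

package com.github.nagyesta.filebarj.io.stream;

import com.github.nagyesta.filebarj.io.stream.index.ArchiveIndexV2;

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;

// Illustrative helper, not part of the patch. It must sit in the
// com.github.nagyesta.filebarj.io.stream package because
// IndexVersion.createIndex(Properties) is package-private.
final class IndexRoundTripExample {

    private IndexRoundTripExample() {
    }

    static ReadOnlyArchiveIndex roundTrip() throws IOException {
        // Writer side: BarjCargoArchiverFileOutputStream builds the footer the same way.
        // The chunk and size values below are arbitrary example numbers.
        final var footer = ArchiveIndexV2.builder()
                .numberOfChunks(1)
                .lastChunkSizeInBytes(174L)
                .maxChunkSizeInBytes(1024L * 1024L)
                .totalEntities(4L)
                .totalSize(174L)
                .build()
                .footerAsString();
        // Reader side: the "version" property picks the parser; a missing or unknown
        // value falls back to IndexVersion.V1 and the legacy property names.
        final var properties = new Properties();
        properties.load(new StringReader(footer));
        return IndexVersion
                .forVersionString(properties.getProperty(ReadOnlyArchiveIndex.INDEX_VERSION))
                .createIndex(properties);
    }
}

This mirrors what the private parse(Properties) helper in BarjCargoArchiveFileInputStreamSource does with the properties loaded from the real index file, so existing V1 archives keep working while newly written archives carry the version marker.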