Skip to content

Commit

Permalink
Add default attribute to jhdf files
Browse files Browse the repository at this point in the history
Fixup tests
  • Loading branch information
jamesmudd committed Aug 19, 2024
1 parent a642ec0 commit c1a20f6
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 6 deletions.
11 changes: 8 additions & 3 deletions jhdf/src/main/java/io/jhdf/WritableHdfFile.java
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ public class WritableHdfFile implements WritableGroup, AutoCloseable {
this.hdfFileChannel = new HdfFileChannel(this.fileChannel, this.superblock);

this.rootGroup = new WritableGroupImpl(null, "/");
this.rootGroup.putAttribute("_jHDF", getJHdfInfo());
}

@Override
Expand All @@ -75,7 +76,7 @@ private void flush() {
logger.info("Flushing to disk [{}]...", path.toAbsolutePath());
try {
rootGroup.write(hdfFileChannel, ROOT_GROUP_ADDRESS);
hdfFileChannel.write(getJHdfInfo());
hdfFileChannel.write(getJHdfInfoBuffer());
long endOfFile = hdfFileChannel.getFileChannel().size();
hdfFileChannel.write(superblock.toBuffer(endOfFile), 0L);
logger.info("Flushed to disk [{}] file is [{}] bytes", path.toAbsolutePath(), endOfFile);
Expand All @@ -84,11 +85,15 @@ private void flush() {
}
}

private ByteBuffer getJHdfInfo() {
final String info = "jHDF - " + JhdfInfo.VERSION + " - " + JhdfInfo.OS + " - " + JhdfInfo.ARCH + " - " + JhdfInfo.BYTE_ORDER;
private ByteBuffer getJHdfInfoBuffer() {
	// Encode the jHDF provenance string as UTF-8 and wrap it for writing to the file.
	return ByteBuffer.wrap(getJHdfInfo().getBytes(StandardCharsets.UTF_8));
}

private static String getJHdfInfo() {
	// Builds the provenance string stored in the "_jHDF" root attribute:
	// library name, version, OS, architecture and byte order, " - " separated.
	final String sep = " - ";
	return "jHDF" + sep + JhdfInfo.VERSION + sep + JhdfInfo.OS + sep + JhdfInfo.ARCH + sep + JhdfInfo.BYTE_ORDER;
}

@Override
public WritiableDataset putDataset(String name, Object data) {
return rootGroup.putDataset(name, data);
Expand Down
8 changes: 5 additions & 3 deletions jhdf/src/test/java/io/jhdf/writing/AttributesWritingTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ void writeScalarAttributes() throws Exception {
// Now read it back
try (HdfFile hdfFile = new HdfFile(tempFile)) {
Map<String, Attribute> attributes = hdfFile.getAttributes();
assertThat(attributes).hasSize(6);
assertThat(attributes).hasSize(7);

// Just check the whole file is readable
TestAllFilesBase.verifyAttributes(hdfFile);
Expand Down Expand Up @@ -215,7 +215,7 @@ void write1DAttributes() throws Exception {
// Now read it back
try (HdfFile hdfFile = new HdfFile(tempFile)) {
Map<String, Attribute> attributes = hdfFile.getAttributes();
assertThat(attributes).hasSize(12);
assertThat(attributes).hasSize(13);

// Just check the whole file is readable
TestAllFilesBase.verifyAttributes(hdfFile);
Expand Down Expand Up @@ -360,7 +360,7 @@ void writeNDAttributes() throws Exception {
// Now read it back
try (HdfFile hdfFile = new HdfFile(tempFile)) {
Map<String, Attribute> attributes = hdfFile.getAttributes();
assertThat(attributes).hasSize(12);
assertThat(attributes).hasSize(13);

// Just check the whole file is readable
TestAllFilesBase.verifyAttributes(hdfFile);
Expand Down Expand Up @@ -408,6 +408,8 @@ void testRemovingAttribute() throws IOException {
// Add attribute then remove it
writableHdfFile.putAttribute("testAttr", 111);
Attribute attr = writableHdfFile.removeAttribute("testAttr");
// Remove the default attribute
Attribute jHdfAttr = writableHdfFile.removeAttribute("_jHDF");

MatcherAssert.assertThat(attr, is(not(nullValue())));
MatcherAssert.assertThat(attr.getData(), is(equalTo(111)));
Expand Down

0 comments on commit c1a20f6

Please sign in to comment.