Skip to content

Commit

Permalink
Merge pull request #642 from jamesmudd/issue-641
Browse files Browse the repository at this point in the history
Fix for Issue 641
  • Loading branch information
jamesmudd authored Oct 28, 2024
2 parents 68c03d7 + 26c6216 commit cebec09
Show file tree
Hide file tree
Showing 2 changed files with 70 additions and 4 deletions.
11 changes: 8 additions & 3 deletions jhdf/src/main/java/io/jhdf/object/datatype/StringData.java
Original file line number Diff line number Diff line change
Expand Up @@ -252,9 +252,14 @@ private void encodeDataInternal(Object data, int[] dims, ByteBuffer buffer) {
encodeDataInternal(newArray, stripLeadingIndex(dims), buffer);
}
} else {
for (String str : (String[]) data) {
buffer.put(this.charset.encode(str)).put(NULL);
}
final int offset = buffer.position();
String[] strings = (String[]) data;
for (int i = 0; i < strings.length; i++) {
String str = strings[i];
buffer.put(this.charset.encode(str))
.put(NULL)
.position(offset + (i + 1) * getSize());
}
}
}

Expand Down
63 changes: 62 additions & 1 deletion jhdf/src/test/java/io/jhdf/writing/StringWritingTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,14 @@
import io.jhdf.HdfFile;
import io.jhdf.TestUtils;
import io.jhdf.WritableHdfFile;
import io.jhdf.api.Dataset;
import io.jhdf.api.Node;
import io.jhdf.api.WritiableDataset;
import io.jhdf.examples.TestAllFilesBase;
import io.jhdf.h5dump.EnabledIfH5DumpAvailable;
import io.jhdf.h5dump.H5Dump;
import io.jhdf.h5dump.HDF5FileXml;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
Expand Down Expand Up @@ -96,7 +98,8 @@ void writeStrings() throws Exception {
{"element 2,1", "element 2,2", "element 2,3", "element 2,4", "element 2,5"}
});

writableHdfFile.putDataset("prose", StringUtils.split(prose));
WritiableDataset proseDataset = writableHdfFile.putDataset("prose", StringUtils.split(prose));
proseDataset.putAttribute("prose_attr", StringUtils.split(prose));

// Actually flush and write everything
writableHdfFile.close();
Expand Down Expand Up @@ -131,4 +134,62 @@ void readStringDatasetsWithH5Dump() throws Exception {
H5Dump.assetXmlAndHdfFileMatch(hdf5FileXml, hdfFile);
}
}

@Test
@Order(3)
// Regression test for https://github.com/jamesmudd/jhdf/issues/641:
// variable-length string attributes were written with wrong element offsets,
// so reading them back returned corrupted values (e.g. ["vv", "cdedf", ""]
// instead of ["vv", "xx", "abcdef"]).
void writeVarStringAttributes() throws Exception {
	Path tempFile = Files.createTempFile(this.getClass().getSimpleName(), ".hdf5");
	try {
		// Write a dataset carrying two string-array attributes. try-with-resources
		// guarantees the file is closed (and flushed) even if a put* call throws,
		// instead of leaking the open writable file.
		try (WritableHdfFile writableHdfFile = HdfFile.write(tempFile)) {
			WritiableDataset writiableDataset = writableHdfFile.putDataset("dataset", new String[] {"vv", "xx", "abcdef"});
			writiableDataset.putAttribute("labels", new String[] {"vv", "xx", "abcdef"});
			writiableDataset.putAttribute("units", new String[] {"", "1", "mm2"});
		}

		// Read it back and check the dataset and both attributes survive the
		// round trip intact.
		try (HdfFile hdfFile = new HdfFile(tempFile)) {
			Dataset dataset = hdfFile.getDatasetByPath("dataset");
			assertThat(dataset.getData()).isEqualTo(new String[] {"vv", "xx", "abcdef"});
			assertThat(dataset.getAttribute("labels").getData()).isEqualTo(new String[] {"vv", "xx", "abcdef"});
			assertThat(dataset.getAttribute("units").getData()).isEqualTo(new String[] {"", "1", "mm2"});
		}
	} finally {
		// Always remove the temp file, even when the write phase or an
		// assertion fails (the original finally only covered the read phase,
		// and ignored the boolean result of File.delete()).
		Files.deleteIfExists(tempFile);
	}
}

@Test
@Order(4)
// Exercises strings far longer than the short values used elsewhere in this
// class, to verify the per-element size/offset handling of the string
// encoder for both a dataset and an attribute (see issue 641).
void writeReallyLongStrings() throws Exception {
	Path tempFile = Files.createTempFile(this.getClass().getSimpleName(), ".hdf5");
	try {
		// Five random alphanumeric strings, each 234-455 chars long.
		String[] randomLongStringData = {
			RandomStringUtils.insecure().nextAlphanumeric(234, 456),
			RandomStringUtils.insecure().nextAlphanumeric(234, 456),
			RandomStringUtils.insecure().nextAlphanumeric(234, 456),
			RandomStringUtils.insecure().nextAlphanumeric(234, 456),
			RandomStringUtils.insecure().nextAlphanumeric(234, 456),
		};

		// Write the long strings as both a dataset and an attribute on it.
		// try-with-resources closes (and flushes) the file even if a put*
		// call throws, instead of leaking the open writable file.
		try (WritableHdfFile writableHdfFile = HdfFile.write(tempFile)) {
			WritiableDataset writiableDataset = writableHdfFile.putDataset("dataset", randomLongStringData);
			writiableDataset.putAttribute("attr", randomLongStringData);
		}

		// Read it back and check both round trips are lossless.
		try (HdfFile hdfFile = new HdfFile(tempFile)) {
			Dataset dataset = hdfFile.getDatasetByPath("dataset");
			assertThat(dataset.getData()).isEqualTo(randomLongStringData);
			assertThat(dataset.getAttribute("attr").getData()).isEqualTo(randomLongStringData);
		}
	} finally {
		// Always remove the temp file, even when the write phase or an
		// assertion fails (the original finally only covered the read phase,
		// and ignored the boolean result of File.delete()).
		Files.deleteIfExists(tempFile);
	}
}
}

0 comments on commit cebec09

Please sign in to comment.