Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions jhdf/src/main/java/io/jhdf/WritableDatasetImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,11 @@ public Object getDataFlat() {
return flatten(data);
}

@Override
public Object getDataFlat(long[] sliceOffset, int[] sliceDimensions) {
	// Writable datasets are backed by in-memory data supplied by the caller;
	// sliced reads are intentionally unsupported, mirroring getData(long[], int[]).
	final String message = "Slicing a writable dataset not supported";
	throw new HdfWritingException(message);
}

@Override
public Object getData(long[] sliceOffset, int[] sliceDimensions) {
throw new HdfWritingException("Slicing a writable dataset not supported");
Expand Down
16 changes: 16 additions & 0 deletions jhdf/src/main/java/io/jhdf/api/Dataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,22 @@ public interface Dataset extends Node {
*/
Object getDataFlat();

/**
 * Gets a slice of data from the HDF5 dataset and converts it to a Java object. It differs from
 * {@link #getData(long[], int[])} in that the data is returned flat (i.e. 1D). This can offer improved
 * performance over {@link #getData(long[], int[])} for multi-dimensional arrays.
 * <p>
 * The type of the returned array will be that returned by {@link #getJavaType()}. The size of the array returned
 * will be the product of the slice dimensions.
 *
 * @param sliceOffset the position in the dataset the slice starts. Must have length equal to number of dimensions
 * @param sliceDimensions the dimensions of the slice to return. Must have length equal to number of dimensions
 * @return the slice of data from the dataset as a 1D array
 * @throws InvalidSliceHdfException if the <code>sliceOffset</code> or <code>sliceDimensions</code> are invalid
 * @throws HdfException if the dataset can't be sliced
 */
Object getDataFlat(long[] sliceOffset, int[] sliceDimensions);

/**
* Gets a slice of data from the HDF5 dataset and converts it to a Java object.
* <p>
Expand Down
22 changes: 22 additions & 0 deletions jhdf/src/main/java/io/jhdf/dataset/DatasetBase.java
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,28 @@ public Object getDataFlat() {
return data;
}

@Override
public Object getDataFlat(long[] sliceOffset, int[] sliceDimensions) {
	// Slicing is undefined for datasets with no data or no dimensions.
	if (isEmpty()) {
		throw new HdfException("Cannot slice empty dataset");
	}
	if (isScalar()) {
		throw new HdfException("Cannot slice scalar dataset");
	}

	// Rejects offsets/dimensions whose lengths or bounds don't match the dataset.
	validateSliceRequest(sliceOffset, sliceDimensions);

	// Fixed stray apostrophe in the log format string ("[{}]'..." -> "[{}]...").
	logger.debug("Getting flat data slice offset={} dimensions={} for [{}]...", sliceOffset, sliceDimensions, getPath());
	ByteBuffer sliceDataBuffer = getSliceDataBuffer(sliceOffset, sliceDimensions);

	// Total element count of the slice. multiplyExact guards against long overflow;
	// toIntExact below guards against exceeding the int-indexed array limit.
	long elements = 1;
	for (int dimension : sliceDimensions) {
		elements = Math.multiplyExact(elements, dimension);
	}

	// Decode the raw buffer into a flat (1D) Java array of the dataset's Java type.
	return DatasetReader.readDataset(getDataType(), sliceDataBuffer, Math.toIntExact(elements), hdfBackingStorage);
}

@Override
public Object getData(long[] sliceOffset, int[] sliceDimensions) {
if (isEmpty()) {
Expand Down
18 changes: 18 additions & 0 deletions jhdf/src/test/java/io/jhdf/dataset/SlicingTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,14 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import static io.jhdf.Utils.flatten;
import static io.jhdf.Utils.getDimensions;
import static org.apache.commons.lang3.ArrayUtils.subarray;
import static org.apache.commons.lang3.ArrayUtils.toObject;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

class SlicingTest {
Expand Down Expand Up @@ -66,6 +68,20 @@ void test3DSliceIntDataset() {
assertThat(toObject(slicedData[1][4]), is(subarray(fullData[1][4], 0, 10)));
}

@Test
void test3DSliceIntDatasetFlat() {
	Dataset dataset = hdfFile.getDatasetByPath("/nD_Datasets/3D_int32");

	long[] offset = new long[]{0, 2, 30};
	int[] dims = new int[]{1, 3, 15};

	// The flat slice must contain exactly the flattened elements of the nested slice.
	int[] flatSlice = (int[]) dataset.getDataFlat(offset, dims);
	Object[] expected = flatten(dataset.getData(offset, dims));

	assertArrayEquals(expected, toObject(flatSlice));
}

@Test
void test3DSliceFloatDataset() {
Dataset dataset = hdfFile.getDatasetByPath("/nD_Datasets/3D_float32");
Expand Down Expand Up @@ -126,11 +142,13 @@ void testInvalidSliceRequests() {
void testEmptyDatasetThrows() {
	// Both the nested and the flat accessors must reject slicing an empty dataset.
	Dataset emptyDataset = scalarEmptyDatasetsHdfFile.getDatasetByPath("empty_uint_32");
	long[] offset = new long[]{3};
	int[] dims = new int[]{3};
	assertThrows(HdfException.class, () -> emptyDataset.getData(offset, dims));
	assertThrows(HdfException.class, () -> emptyDataset.getDataFlat(offset, dims));
}

@Test
void testScalarDatasetThrows() {
	// Scalar datasets have no dimensions to slice over, so both accessors must throw.
	// (Local renamed from the copy-pasted "emptyDataset" — this one is scalar, not empty.)
	Dataset scalarDataset = scalarEmptyDatasetsHdfFile.getDatasetByPath("scalar_float_64");
	assertThrows(HdfException.class, () -> scalarDataset.getData(new long[]{3}, new int[]{3}));
	assertThrows(HdfException.class, () -> scalarDataset.getDataFlat(new long[]{3}, new int[]{3}));
}
}
Loading