Skip to content

Commit d0b90bb

Browse files
committed
Commit message: refactor v2 code
1 parent a74e929 commit d0b90bb

2 files changed

Lines changed: 25 additions & 20 deletions

File tree

src/main/java/org/hdf5javalib/hdffile/infrastructure/fractalheap/FractalHeap.java

Lines changed: 21 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
import java.util.List;
1717
import java.util.Objects;
1818

19+
import static org.hdf5javalib.datatype.FixedPointDatatype.BIT_MULTIPLIER;
20+
1921
public class FractalHeap {
2022
private FractalHeapHeader header;
2123
private Block rootBlock;
@@ -59,15 +61,15 @@ private static FractalHeapHeader readHeader(SeekableByteChannel channel, HdfData
5961
FixedPointDatatype sizeOfLength = hdfDataFile.getSuperblock().getFixedPointDatatypeForLength();
6062

6163
FractalHeapHeader h = new FractalHeapHeader();
62-
ByteBuffer signatureBuffer = ByteBuffer.allocate(4);
64+
ByteBuffer signatureBuffer = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
6365
channel.read(signatureBuffer);
6466
signatureBuffer.flip();
6567
h.signature = new String(signatureBuffer.array(), StandardCharsets.US_ASCII);
6668
if (!Objects.equals(h.signature, "FRHP")) {
6769
throw new IOException("Invalid signature");
6870
}
6971
int headerSize = getTotalBytesRead(sizeOfLength.getSize(), sizeOfLength.getSize());
70-
ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize);
72+
ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
7173
int bytesRead = channel.read(headerBuffer);
7274
if ( bytesRead != headerSize) {
7375
throw new IllegalStateException("Incorrect amount of bytes read: " + headerSize + " wanted but got " + bytesRead);
@@ -103,7 +105,7 @@ private static FractalHeapHeader readHeader(SeekableByteChannel channel, HdfData
103105
h.checksumDirect = (h.flags & 0x02) != 0;
104106
if (h.hasFilters && h.currentNumRowsRootIndirectBlock == 0) {
105107
headerSize = sizeOfLength.getSize() + 4;
106-
headerBuffer = ByteBuffer.allocate(headerSize);
108+
headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
107109
bytesRead = channel.read(headerBuffer);
108110
if ( bytesRead != headerSize) {
109111
throw new IllegalStateException("Incorrect amount of bytes read: " + headerSize + " wanted but got " + bytesRead);
@@ -113,25 +115,28 @@ private static FractalHeapHeader readHeader(SeekableByteChannel channel, HdfData
113115
h.filterMaskRoot = Integer.toUnsignedLong(headerBuffer.getInt());
114116
}
115117
if (h.hasFilters) {
116-
headerBuffer = ByteBuffer.allocate(h.ioFiltersEncodedLength);
118+
headerBuffer = ByteBuffer.allocate(h.ioFiltersEncodedLength).order(ByteOrder.LITTLE_ENDIAN);
117119
bytesRead = channel.read(headerBuffer);
118120
if ( bytesRead != headerSize) {
119121
throw new IllegalStateException("Incorrect amount of bytes read: " + headerSize + " wanted but got " + bytesRead);
120122
}
121123
headerBuffer.flip();
122-
channel.read(headerBuffer);
123124
byte[] filterData = headerBuffer.array();
124125
h.filterPipeline = parseFilterPipeline(filterData);
125126
}
126127
headerSize = 4;
127-
headerBuffer = ByteBuffer.allocate(headerSize);
128+
headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
128129
bytesRead = channel.read(headerBuffer);
129130
if ( bytesRead != headerSize) {
130131
throw new IllegalStateException("Incorrect amount of bytes read: " + headerSize + " wanted but got " + bytesRead);
131132
}
132133
headerBuffer.flip();
133134
h.checksum = Integer.toUnsignedLong(headerBuffer.getInt());
134-
h.offsetBytes = (h.maximumHeapSize + 7) / 8;
135+
int offsetBytesSize = (h.maximumHeapSize + 7) / 8;
136+
h.offsetBytes = new FixedPointDatatype(
137+
FixedPointDatatype.createClassAndVersion(),
138+
FixedPointDatatype.createClassBitField(false, false, false, false),
139+
offsetBytesSize, 0, BIT_MULTIPLIER * offsetBytesSize, hdfDataFile);
135140
double logVal = Math.log((double) h.maximumDirectBlockSize.getInstance(Long.class) / h.startingBlockSize.getInstance(Long.class)) / Math.log(2);
136141
h.maxDblockRows = (int) Math.floor(logVal) + 1;
137142
return h;
@@ -245,7 +250,7 @@ private static Block readDirectBlock(SeekableByteChannel channel, FractalHeapHea
245250
}
246251
channel.position(address.getInstance(Long.class));
247252
//
248-
int headerSize = 4 + 1 + sizeOfOffset.getSize() + header.offsetBytes + (header.checksumDirect ? 4 : 0);
253+
int headerSize = 4 + 1 + sizeOfOffset.getSize() + header.offsetBytes.getSize() + (header.checksumDirect ? 4 : 0);
249254
ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
250255
int bytesRead = channel.read(headerBuffer);
251256
if ( headerSize != bytesRead)
@@ -264,11 +269,11 @@ private static Block readDirectBlock(SeekableByteChannel channel, FractalHeapHea
264269
if (heapHeader.isUndefined()) {
265270
throw new IOException("Invalid heap header address in direct block");
266271
}
267-
HdfFixedPoint blockOffset = HdfReadUtils.readHdfFixedPointFromBuffer(sizeOfOffset, headerBuffer);
272+
HdfFixedPoint blockOffset = HdfReadUtils.readHdfFixedPointFromBuffer(header.offsetBytes, headerBuffer);
268273
if (blockOffset.getInstance(Long.class) != expectedBlockOffset) {
269274
throw new IOException("Block offset mismatch");
270275
}
271-
headerBuffer.position(4 + 1 + sizeOfOffset.getSize()+header.offsetBytes);
276+
headerBuffer.position(4 + 1 + sizeOfOffset.getSize()+header.offsetBytes.getSize());
272277
long checksum = 0;
273278
if (header.checksumDirect) {
274279
checksum = Integer.toUnsignedLong(headerBuffer.getInt()); // checksum, not verifying
@@ -283,7 +288,7 @@ private static Block readDirectBlock(SeekableByteChannel channel, FractalHeapHea
283288
if (dataSize < 0) {
284289
throw new IOException("Invalid data size in direct block");
285290
}
286-
headerBuffer = ByteBuffer.allocate((int) dataSize);
291+
headerBuffer = ByteBuffer.allocate((int) dataSize).order(ByteOrder.LITTLE_ENDIAN);
287292
bytesRead = channel.read(headerBuffer);
288293
if ( bytesRead != dataSize)
289294
throw new IllegalStateException();
@@ -335,8 +340,8 @@ private static Block readIndirectBlock(SeekableByteChannel channel, FractalHeapH
335340
}
336341
plusSize += 4; // checksum
337342

338-
int headerSize = 4 + 1 + sizeOfOffset.getSize() + header.offsetBytes + plusSize;
339-
ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize);
343+
int headerSize = 4 + 1 + sizeOfOffset.getSize() + header.offsetBytes.getSize() + plusSize;
344+
ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
340345
int bytesRead = channel.read(headerBuffer);
341346
if ( headerSize != bytesRead)
342347
throw new IllegalStateException("Incorrect bytesRead: " + bytesRead + ": expected " + headerSize);
@@ -355,11 +360,11 @@ private static Block readIndirectBlock(SeekableByteChannel channel, FractalHeapH
355360
if (heapHeader.isUndefined()) {
356361
throw new IOException("Invalid heap header address in indirect block");
357362
}
358-
HdfFixedPoint blockOffset = HdfReadUtils.readHdfFixedPointFromBuffer(sizeOfOffset, headerBuffer);
363+
HdfFixedPoint blockOffset = HdfReadUtils.readHdfFixedPointFromBuffer(header.offsetBytes, headerBuffer);
359364
if (blockOffset.getInstance(Long.class) != expectedBlockOffset) {
360365
throw new IOException("Block offset mismatch");
361366
}
362-
headerBuffer.position(4 + 1 + sizeOfOffset.getSize() + header.offsetBytes);
367+
headerBuffer.position(4 + 1 + sizeOfOffset.getSize() + header.offsetBytes.getSize());
363368
ib.blockOffset = blockOffset.getInstance(Long.class);
364369
ib.children = new ArrayList<>();
365370
List<ChildInfo> childInfos = new ArrayList<>();
@@ -461,7 +466,7 @@ public static class FractalHeapHeader {
461466
long filterMaskRoot;
462467
FilterPipeline filterPipeline;
463468
long checksum;
464-
int offsetBytes;
469+
FixedPointDatatype offsetBytes;
465470
int maxDblockRows;
466471
boolean hasFilters;
467472
boolean checksumDirect;

src/main/java/org/hdf5javalib/hdfjava/HdfFileReader.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -503,11 +503,11 @@ public static HdfSuperblock readSuperblockFromSeekableByteChannel(SeekableByteCh
503503
fixedPointDatatypeForOffset = new FixedPointDatatype(
504504
FixedPointDatatype.createClassAndVersion(),
505505
FixedPointDatatype.createClassBitField(false, false, false, false),
506-
offsetSize, (short) 0, (short) (BIT_MULTIPLIER * offsetSize), hdfDataFile);
506+
offsetSize, 0, BIT_MULTIPLIER * offsetSize, hdfDataFile);
507507
fixedPointDatatypeForLength = new FixedPointDatatype(
508508
FixedPointDatatype.createClassAndVersion(),
509509
FixedPointDatatype.createClassBitField(false, false, false, false),
510-
lengthSize, (short) 0, (short) (BIT_MULTIPLIER * lengthSize), hdfDataFile);
510+
lengthSize, 0, BIT_MULTIPLIER * lengthSize, hdfDataFile);
511511

512512
groupLeafNodeK = Short.toUnsignedInt(buffer.getShort());
513513
groupInternalNodeK = Short.toUnsignedInt(buffer.getShort());
@@ -527,11 +527,11 @@ public static HdfSuperblock readSuperblockFromSeekableByteChannel(SeekableByteCh
527527
fixedPointDatatypeForOffset = new FixedPointDatatype(
528528
FixedPointDatatype.createClassAndVersion(),
529529
FixedPointDatatype.createClassBitField(false, false, false, false),
530-
offsetSize, (short) 0, (short) (BIT_MULTIPLIER * offsetSize), hdfDataFile);
530+
offsetSize, 0, BIT_MULTIPLIER * offsetSize, hdfDataFile);
531531
fixedPointDatatypeForLength = new FixedPointDatatype(
532532
FixedPointDatatype.createClassAndVersion(),
533533
FixedPointDatatype.createClassBitField(false, false, false, false),
534-
lengthSize, (short) 0, (short) (BIT_MULTIPLIER * lengthSize), hdfDataFile);
534+
lengthSize,0, BIT_MULTIPLIER * lengthSize, hdfDataFile);
535535

536536
// groupLeafNodeK = Short.toUnsignedInt(buffer.getShort());
537537
// groupInternalNodeK = Short.toUnsignedInt(buffer.getShort());

0 commit comments

Comments (0)