@@ -156,7 +156,7 @@ private void readV1Structure(long rootObjectHeaderAddr, HdfSymbolTableEntry root
156156 long bTreeAddress = ((HdfSymbolTableEntryCacheWithScratch ) rootGroupSTE .getCache ()).getbTreeAddress ().getInstance (Long .class );
157157
158158 HdfLocalHeap localHeap = readLocalHeapFromSeekableByteChannel (fileChannel , heapOffset , this );
159- HdfBTreeV1 groupBTree = readBTreeFromSeekableByteChannel (fileChannel , bTreeAddress , this );
159+ HdfBTreeV1 groupBTree = readBTreeFromSeekableByteChannelForGroups (fileChannel , bTreeAddress , this );
160160
161161 String rootGroupName = localHeap .stringAtOffset (rootGroupSTE .getLinkNameOffset ());
162162 HdfGroup rootGroup = new HdfGroup (rootGroupName , rootObjectHeader , null , null );
@@ -206,7 +206,7 @@ private void readV1GroupHierarchy(HdfGroup parentGroup, HdfLocalHeap localHeap,
206206 long newHeapOffset = ((HdfSymbolTableEntryCacheWithScratch ) symbolTableEntry .getCache ()).getLocalHeapAddress ().getInstance (Long .class );
207207 long newBTreeAddress = ((HdfSymbolTableEntryCacheWithScratch ) symbolTableEntry .getCache ()).getbTreeAddress ().getInstance (Long .class );
208208 HdfLocalHeap newLocalHeap = readLocalHeapFromSeekableByteChannel (fileChannel , newHeapOffset , this );
209- HdfBTreeV1 newGroupBTree = readBTreeFromSeekableByteChannel (fileChannel , newBTreeAddress , this );
209+ HdfBTreeV1 newGroupBTree = readBTreeFromSeekableByteChannelForGroups (fileChannel , newBTreeAddress , this );
210210 readV1GroupHierarchy (groupObject , newLocalHeap , newGroupBTree );
211211 }
212212 break ;
@@ -576,12 +576,12 @@ public static HdfSuperblock readSuperblockFromSeekableByteChannel(SeekableByteCh
576576 * @return the constructed HdfTree instance
577577 * @throws IOException if an I/O error occurs or the B-Tree data is invalid
578578 */
579- public static HdfBTreeV1 readBTreeFromSeekableByteChannel (
579+ public static HdfBTreeV1 readBTreeFromSeekableByteChannelForGroups (
580580 SeekableByteChannel fileChannel ,
581581 long btreeAddress ,
582582 HdfDataFile hdfDataFile
583583 ) throws Exception {
584- return readFromSeekableByteChannelRecursive (fileChannel , btreeAddress , hdfDataFile , new LinkedHashMap <>());
584+ return readFromSeekableByteChannelRecursiveForGroups (fileChannel , btreeAddress , hdfDataFile , new LinkedHashMap <>());
585585 }
586586
587587 /**
@@ -594,10 +594,10 @@ public static HdfBTreeV1 readBTreeFromSeekableByteChannel(
594594 * @return the constructed HdfTree instance
595595 * @throws IOException if an I/O error occurs or the B-Tree data is invalid
596596 */
597- private static HdfBTreeV1 readFromSeekableByteChannelRecursive (SeekableByteChannel fileChannel ,
598- long nodeAddress ,
599- HdfDataFile hdfDataFile ,
600- Map <Long , HdfBTreeV1 > visitedNodes
597+ private static HdfBTreeV1 readFromSeekableByteChannelRecursiveForGroups (SeekableByteChannel fileChannel ,
598+ long nodeAddress ,
599+ HdfDataFile hdfDataFile ,
600+ Map <Long , HdfBTreeV1 > visitedNodes
601601 ) throws Exception {
602602 if (visitedNodes .containsKey (nodeAddress )) {
603603 throw new IllegalStateException ("Cycle detected or node re-visited: BTree node address "
@@ -650,7 +650,7 @@ private static HdfBTreeV1 readFromSeekableByteChannelRecursive(SeekableByteChann
650650 HdfGroupBTreeEntry entry ;
651651 if (nodeLevel == 1 ) {
652652 // It's a sub B-Tree
653- HdfBTreeV1 child = readFromSeekableByteChannelRecursive (fileChannel , childAddress , hdfDataFile , visitedNodes );
653+ HdfBTreeV1 child = readFromSeekableByteChannelRecursiveForGroups (fileChannel , childAddress , hdfDataFile , visitedNodes );
654654 entry = new HdfGroupBTreeEntry (key , childPointer , child , null ); // Assuming entry constructor accepts Object for last param
655655 } else {
656656 // It's a SNOD
@@ -663,102 +663,105 @@ private static HdfBTreeV1 readFromSeekableByteChannelRecursive(SeekableByteChann
663663 return currentNode ;
664664 }
665665
666- // /**
667- // * Reads an HdfTree from a file channel.
668- // *
669- // * @param fileChannel the file channel to read from
670- // * @param hdfDataFile the HDF5 file context
671- // * @return the constructed HdfTree instance
672- // * @throws IOException if an I/O error occurs or the B-Tree data is invalid
673- // */
674- // public static HdfBTreeV1 readBTreeFromSeekableByteChannel(
675- // SeekableByteChannel fileChannel,
676- // long btreeAddress,
677- // int dimensions,
678- // FixedPointDatatype eightByteFixedPointType,
679- // HdfDataFile hdfDataFile
680- // ) throws IOException, InvocationTargetException, InstantiationException, IllegalAccessException {
681- // return readFromSeekableByteChannelRecursive(fileChannel, btreeAddress, dimensions, eightByteFixedPointType, hdfDataFile, new LinkedHashMap<>());
682- // }
683- //
684- // /**
685- // * Recursively reads an HdfTree from a file channel, handling cycles.
686- // *
687- // * @param fileChannel the file channel to read from
688- // * @param nodeAddress the address of the current node
689- // * @param visitedNodes a map of visited node addresses to detect cycles
690- // * @param hdfDataFile the HDF5 file context
691- // * @return the constructed HdfTree instance
692- // * @throws IOException if an I/O error occurs or the B-Tree data is invalid
693- // */
694- // private static HdfBTreeV1 readFromSeekableByteChannelRecursive(SeekableByteChannel fileChannel,
695- // long nodeAddress,
696- // int dimensions,
697- // FixedPointDatatype eightByteFixedPointType,
698- // HdfDataFile hdfDataFile,
699- // Map<Long, HdfBTreeV1> visitedNodes
700- // ) throws IOException, InvocationTargetException, InstantiationException, IllegalAccessException {
701- // if (visitedNodes.containsKey(nodeAddress)) {
702- // throw new IllegalStateException("Cycle detected or node re-visited: BTree node address "
703- // + nodeAddress + " encountered again during recursive read.");
704- // }
705- //
706- // fileChannel.position(nodeAddress);
707- // FixedPointDatatype hdfOffset = hdfDataFile.getSuperblock().getFixedPointDatatypeForOffset();
708- // final int offsetSize = hdfOffset.getSize();
709- //
710- // int headerSize = BTREE_HEADER_INITIAL_SIZE + offsetSize + offsetSize;
711- // ByteBuffer headerBuffer = ByteBuffer.allocate(headerSize).order(ByteOrder.LITTLE_ENDIAN);
712- // fileChannel.read(headerBuffer);
713- // headerBuffer.flip();
714- //
715- // byte[] signatureBytes = new byte[BTREE_SIGNATURE.length];
716- // headerBuffer.get(signatureBytes);
717- // if (Arrays.compare(signatureBytes, BTREE_SIGNATURE) != 0) {
718- // throw new IOException("Invalid B-tree node signature: '" + Arrays.toString(signatureBytes) + "' at position " + nodeAddress);
719- // }
720- //
721- // int nodeType = Byte.toUnsignedInt(headerBuffer.get());
722- // int nodeLevel = Byte.toUnsignedInt(headerBuffer.get());
723- // int entriesUsed = Short.toUnsignedInt(headerBuffer.getShort());
724- //
725- // HdfFixedPoint leftSiblingAddress = HdfReadUtils.readHdfFixedPointFromBuffer(hdfOffset, headerBuffer);
726- // HdfFixedPoint rightSiblingAddress = HdfReadUtils.readHdfFixedPointFromBuffer(hdfOffset, headerBuffer);
727- //
728- // int entriesDataSize = (4 + 4 + 8*dimensions + offsetSize + offsetSize) * entriesUsed;
729- // ByteBuffer entriesBuffer = ByteBuffer.allocate(entriesDataSize).order(ByteOrder.LITTLE_ENDIAN);
730- // fileChannel.read(entriesBuffer);
731- // entriesBuffer.flip();
732- //
733- // List<HdfBTreeEntryBase> entries = new ArrayList<>(entriesUsed);
734- //
735- // HdfBTreeV1 currentNode = new HdfBTreeV1(nodeType, nodeLevel, entriesUsed, leftSiblingAddress, rightSiblingAddress, null, entries, hdfDataFile);
736- // visitedNodes.put(nodeAddress, currentNode);
737- //
738- // // (4 + 4 + 8*dimensions + 1*length + 1*length) * entriesUsed
739- // // 4 + 4 + 8*dimensions + 8 + 8
740- // // 8 + 24 + 16
741- // // 48
742- //
743- // for (int i = 0; i < entriesUsed; i++) {
744- // long sizeOfChunk = Integer.toUnsignedLong(entriesBuffer.getInt());
745- // long filterMask = Integer.toUnsignedLong(entriesBuffer.getInt());
746- // List<HdfFixedPoint> dimensionOffsets = new ArrayList<>();
747- // for (int j = 0; j < dimensions; j++) {
748- // dimensionOffsets.add(HdfReadUtils.readHdfFixedPointFromBuffer(eightByteFixedPointType, entriesBuffer));
749- // }
750- // HdfFixedPoint zeroValue = HdfReadUtils.readHdfFixedPointFromBuffer(hdfOffset, entriesBuffer);
751- // HdfFixedPoint childPointer = HdfReadUtils.readHdfFixedPointFromBuffer(hdfOffset, entriesBuffer);
752- //
753- // if ( nodeLevel == 0 ) {
754- // entries.add(new HdfChunkBTreeEntry(null, childPointer, null, sizeOfChunk, filterMask, dimensionOffsets));
755- // } else {
756- // HdfBTreeV1 bTree = readFromSeekableByteChannelRecursive(fileChannel, childPointer.getInstance(Long.class), dimensions, eightByteFixedPointType, hdfDataFile, visitedNodes);
757- // }
758- // }
759- // return currentNode;
760- // }
761- //
666+ /**
667+ * Reads the v1 B-tree that indexes a chunked dataset's raw data chunks.
668+ *
669+ * @param fileChannel the file channel to read from
670+ * @param hdfDataFile the HDF5 file context
671+ * @return the constructed HdfBTreeV1 instance
672+ * @throws IOException if an I/O error occurs or the B-Tree data is invalid
673+ */
674+ public static HdfBTreeV1 readBTreeFromSeekableByteChannelForChunked (
675+ SeekableByteChannel fileChannel ,
676+ long btreeAddress ,
677+ int dimensions ,
678+ FixedPointDatatype eightByteFixedPointType ,
679+ HdfDataFile hdfDataFile
680+ ) throws IOException , InvocationTargetException , InstantiationException , IllegalAccessException {
681+ return readFromSeekableByteChannelRecursiveForChunked (fileChannel , btreeAddress , dimensions , eightByteFixedPointType , hdfDataFile , new LinkedHashMap <>());
682+ }
683+
684+ /**
685+ * Recursively reads one chunked-dataset B-tree node and its children, detecting cycles.
686+ *
687+ * @param fileChannel the file channel to read from
688+ * @param nodeAddress the address of the current node
689+ * @param visitedNodes a map of visited node addresses to detect cycles
690+ * @param hdfDataFile the HDF5 file context
691+ * @return the constructed HdfBTreeV1 instance for this node
692+ * @throws IOException if an I/O error occurs or the B-Tree data is invalid
693+ */
694+ private static HdfBTreeV1 readFromSeekableByteChannelRecursiveForChunked (SeekableByteChannel fileChannel ,
695+ long nodeAddress ,
696+ int dimensions ,
697+ FixedPointDatatype eightByteFixedPointType ,
698+ HdfDataFile hdfDataFile ,
699+ Map <Long , HdfBTreeV1 > visitedNodes
700+ ) throws IOException , InvocationTargetException , InstantiationException , IllegalAccessException {
701+ if (visitedNodes .containsKey (nodeAddress )) {
702+ throw new IllegalStateException ("Cycle detected or node re-visited: BTree node address "
703+ + nodeAddress + " encountered again during recursive read." );
704+ }
705+
706+ fileChannel .position (nodeAddress );
707+ FixedPointDatatype hdfOffset = hdfDataFile .getSuperblock ().getFixedPointDatatypeForOffset ();
708+ final int offsetSize = hdfOffset .getSize ();
709+
710+ int headerSize = BTREE_HEADER_INITIAL_SIZE + offsetSize + offsetSize ;
711+ ByteBuffer headerBuffer = ByteBuffer .allocate (headerSize ).order (ByteOrder .LITTLE_ENDIAN );
712+ fileChannel .read (headerBuffer );
713+ headerBuffer .flip ();
714+
715+ byte [] signatureBytes = new byte [BTREE_SIGNATURE .length ];
716+ headerBuffer .get (signatureBytes );
717+ if (Arrays .compare (signatureBytes , BTREE_SIGNATURE ) != 0 ) {
718+ throw new IOException ("Invalid B-tree node signature: '" + Arrays .toString (signatureBytes ) + "' at position " + nodeAddress );
719+ }
720+
721+ int nodeType = Byte .toUnsignedInt (headerBuffer .get ());
722+ int nodeLevel = Byte .toUnsignedInt (headerBuffer .get ());
723+ int entriesUsed = Short .toUnsignedInt (headerBuffer .getShort ());
724+
725+ HdfFixedPoint leftSiblingAddress = HdfReadUtils .readHdfFixedPointFromBuffer (hdfOffset , headerBuffer );
726+ HdfFixedPoint rightSiblingAddress = HdfReadUtils .readHdfFixedPointFromBuffer (hdfOffset , headerBuffer );
727+
728+ int entriesDataSize = (4 + 4 + 8 *dimensions + offsetSize + offsetSize ) * entriesUsed ;
729+ ByteBuffer entriesBuffer = ByteBuffer .allocate (entriesDataSize ).order (ByteOrder .LITTLE_ENDIAN );
730+ fileChannel .read (entriesBuffer );
731+ entriesBuffer .flip ();
732+
733+ List <HdfBTreeEntryBase > entries = new ArrayList <>(entriesUsed );
734+
735+ HdfBTreeV1 currentNode = new HdfBTreeV1 (nodeType , nodeLevel , entriesUsed , leftSiblingAddress , rightSiblingAddress , null , entries , hdfDataFile );
736+ visitedNodes .put (nodeAddress , currentNode );
737+
738+ // (4 + 4 + 8*dimensions + 1*length + 1*length) * entriesUsed
739+ // 4 + 4 + 8*dimensions + 8 + 8
740+ // 8 + 24 + 16
741+ // 48
742+
743+ for (int i = 0 ; i < entriesUsed ; i ++) {
744+ long sizeOfChunk = Integer .toUnsignedLong (entriesBuffer .getInt ());
745+ long filterMask = Integer .toUnsignedLong (entriesBuffer .getInt ());
746+ List <HdfFixedPoint > dimensionOffsets = new ArrayList <>();
747+ for (int j = 0 ; j < dimensions ; j ++) {
748+ dimensionOffsets .add (HdfReadUtils .readHdfFixedPointFromBuffer (eightByteFixedPointType , entriesBuffer ));
749+ }
750+ HdfFixedPoint zeroValue = HdfReadUtils .readHdfFixedPointFromBuffer (hdfOffset , entriesBuffer );
751+ HdfFixedPoint childPointer = HdfReadUtils .readHdfFixedPointFromBuffer (hdfOffset , entriesBuffer );
752+
753+ HdfBTreeEntryBase hdfBTreeEntryBase ;
754+ if ( nodeLevel == 0 ) {
755+ hdfBTreeEntryBase = new HdfChunkBTreeEntry (zeroValue , childPointer , null , sizeOfChunk , filterMask , dimensionOffsets );
756+ } else {
757+ HdfBTreeV1 bTree = readFromSeekableByteChannelRecursiveForChunked (fileChannel , childPointer .getInstance (Long .class ), dimensions , eightByteFixedPointType , hdfDataFile , visitedNodes );
758+ hdfBTreeEntryBase = new HdfGroupBTreeEntry (zeroValue , childPointer , bTree , null );
759+ }
760+ entries .add (hdfBTreeEntryBase );
761+ }
762+ return currentNode ;
763+ }
764+
762765 /**
763766 * Reads an HdfGroupSymbolTableNode from a file channel.
764767 *
0 commit comments