Skip to content

Commit aa367a7

Browse files
committed
SonarQube cleanup.
1 parent 6bad505 commit aa367a7

3 files changed

Lines changed: 4 additions & 102 deletions

File tree

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@
4848
<sonar.projectKey>karlnicholas_Hdf5JavaLib</sonar.projectKey>
4949
<sonar.organization>karlnicholas</sonar.organization>
5050
<sonar.host.url>https://sonarcloud.io</sonar.host.url>
51-
<sonar.exclusions>/hdf5/**</sonar.exclusions>
51+
<sonar.exclusions>hdf5/**,HdfFileAllocation.java</sonar.exclusions>
5252
</properties>
5353

5454
<dependencies>

src/main/java/org/hdf5javalib/hdfjava/HdfFileReader.java

Lines changed: 2 additions & 100 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,6 @@
3939
*/
4040
public class HdfFileReader implements HdfDataFile {
4141
// /** The superblock containing metadata about the HDF5 file. */
42-
// private HdfSuperblock superblock;
4342
public static final byte[] BTREE_SIGNATURE = {'T', 'R', 'E', 'E'};
4443
public static final int BTREE_HEADER_INITIAL_SIZE = 8;
4544
private static final int MAX_SNOD_ENTRIES = 8;
@@ -177,6 +176,8 @@ private void readInfrastructure(HdfGroup parentGroup, HdfLocalHeap localHeap, Hd
177176
parentGroup.addChild(groupObject);
178177
readInfrastructure(groupObject, newLocalHeap, newGroupBTree);
179178
break;
179+
default:
180+
throw new UnsupportedOperationException("Unknown type: " + symbolTableEntry.getCache().getCacheType());
180181
}
181182
}
182183
}
@@ -189,12 +190,6 @@ private void readInfrastructure(HdfGroup parentGroup, HdfLocalHeap localHeap, Hd
189190
* @return a collection of
190191
*/
191192
public Iterator<HdfDataset> datasetIterator() {
192-
// return bTree.getEntries().stream()
193-
// .filter(bte -> bte.getGroupSymbolTableNode().isPresent())
194-
// .flatMap(bte -> bte.getGroupSymbolTableNode().get().getSymbolTableEntries().stream())
195-
// .filter(ste -> ste.getCache() instanceof HdfSymbolTableEntryCacheNoScratch)
196-
// .map(ste -> ((HdfSymbolTableEntryCacheNoScratch) ste.getCache()).getDataSet())
197-
// .toList();
198193
return bTree.datasetIterator();
199194

200195
}
@@ -207,16 +202,6 @@ public List<HdfDataset> getDatasets() {
207202
return resultList;
208203
}
209204

210-
// private Optional<HdfDataObject> findTypeInBTree(HdfBTree bTree, String[] components, int index, String currentComponent) {
211-
// if (bTree == null || components == null || index >= components.length || currentComponent == null) {
212-
// return Optional.empty();
213-
// }
214-
//// Optional<HdfDataObject> result = bTree.findObjectByName(currentComponent, this);
215-
//// if (result.isPresent()) {
216-
//// return findTypeByPath(components, index + 1, result.get());
217-
//// }
218-
// return Optional.empty();
219-
// }
220205
public Optional<HdfDataset> getDataset(String path) {
221206
return getHdfDataObject(path, HdfDataset.class);
222207
}
@@ -592,15 +577,6 @@ public static HdfSymbolTableEntryCache readCacheWithScratchFromSeekableByteChann
592577
// reading for group.
593578
HdfFixedPoint bTreeAddress = HdfReadUtils.readHdfFixedPointFromFileChannel(hdfDataFile.getSuperblock().getFixedPointDatatypeForOffset(), fileChannel);
594579
HdfFixedPoint localHeapAddress = HdfReadUtils.readHdfFixedPointFromFileChannel(hdfDataFile.getSuperblock().getFixedPointDatatypeForOffset(), fileChannel);
595-
// long savedPosition = fileChannel.position();
596-
//
597-
// fileChannel.position(localHeapAddress.getInstance(Long.class));
598-
// HdfLocalHeap localHeap = readLocalHeapFromSeekableByteChannel(fileChannel, hdfDataFile, objectName);
599-
//
600-
// fileChannel.position(bTreeAddress.getInstance(Long.class));
601-
// HdfBTree bTreeV1 = readBTreeFromSeekableByteChannel(fileChannel, hdfDataFile, localHeap, objectName);
602-
// fileChannel.position(savedPosition);
603-
// return new HdfSymbolTableEntryCacheWithScratch(objectName, objectHeader, bTreeV1, localHeap, hdfDataFile);
604580
return new HdfSymbolTableEntryCacheWithScratch(bTreeAddress, localHeapAddress);
605581
}
606582

@@ -624,15 +600,6 @@ public static HdfSymbolTableEntry readSteFromSeekableByteChannel(
624600
int cacheType = HdfReadUtils.readIntFromFileChannel(fileChannel);
625601
HdfReadUtils.skipBytes(fileChannel, RESERVED_FIELD_1_SIZE); // Skip reserved field
626602

627-
// long savedPosition = fileChannel.position();
628-
// fileChannel.position(objectHeaderAddress.getInstance(Long.class));
629-
// String objectName = localHeap == null ? "" : localHeap.stringAtOffset(linkNameOffset);
630-
// HdfObjectHeaderPrefix objectHeader = readObjectHeaderPrefixFromSeekableByteChannel(
631-
// fileChannel,
632-
// hdfDataFile,
633-
// objectName
634-
// );
635-
// fileChannel.position(savedPosition);
636603
HdfSymbolTableEntryCache cache;
637604
if (cacheType == 0) {
638605
cache = readCacheNoScratchFromSeekableByteChannel(fileChannel, hdfDataFile);
@@ -866,61 +833,6 @@ private void initializeGlobalHeap(HdfFixedPoint offset) {
866833
}
867834
}
868835

869-
// /**
870-
// * Collects a map of dataset names to their information from the B-tree and local heap.
871-
// *
872-
// * @param fileChannel the seekable byte channel for reading the file
873-
// * @param bTree the B-tree containing symbol table entries
874-
// * @param localHeap the local heap storing link names
875-
// * @return a map of dataset names to their {@link HdfGroup.DataSetInfo}
876-
// * @throws IOException if an I/O error occurs
877-
// */
878-
// private Map<String, HdfGroup.DataSetInfo> collectDatasetsMap(SeekableByteChannel fileChannel, HdfBTree bTree, HdfLocalHeap localHeap) throws IOException {
879-
// Map<String, HdfGroup.DataSetInfo> dataSets = new LinkedHashMap<>();
880-
// collectDatasetsRecursive(bTree, dataSets, localHeap, fileChannel);
881-
// return dataSets;
882-
// }
883-
//
884-
// /**
885-
// * Recursively collects dataset information from the B-tree.
886-
// * <p>
887-
// * Traverses the B-tree, processing leaf nodes to extract dataset metadata and
888-
// * recursively handling internal nodes to collect all datasets.
889-
// * </p>
890-
// *
891-
// * @param currentNode the current B-tree node
892-
// * @param dataSets the map to store dataset information
893-
// * @param localHeap the local heap for link names
894-
// * @param fileChannel the seekable byte channel for reading
895-
// * @throws IOException if an I/O error occurs
896-
// */
897-
// private void collectDatasetsRecursive(HdfBTree currentNode,
898-
// Map<String, HdfGroup.DataSetInfo> dataSets,
899-
// HdfLocalHeap localHeap,
900-
// SeekableByteChannel fileChannel) throws IOException {
901-
// for (HdfGroupBTreeEntry entry : currentNode.getEntries()) {
902-
// if (entry.isLeafEntry()) {
903-
// HdfGroupSymbolTableNode snod = entry.getSymbolTableNode();
904-
// for (HdfSymbolTableEntry ste : snod.getSymbolTableEntries()) {
905-
// HdfString linkName = localHeap.parseStringAtOffset(ste.getLinkNameOffset());
906-
// long dataObjectHeaderAddress = ste.getObjectHeaderOffset().getInstance(Long.class);
907-
// long linkNameOffset = ste.getLinkNameOffset().getInstance(Long.class);
908-
// fileChannel.position(dataObjectHeaderAddress);
909-
// HdfObjectHeaderPrefixV1 header = HdfObjectHeaderPrefixV1.readFromSeekableByteChannel(fileChannel, this);
910-
// DatatypeMessage dataType = header.findMessageByType(DatatypeMessage.class).orElseThrow();
911-
// HdfDataset dataset = new HdfDataset(this, linkName.toString(), dataType.getHdfDatatype(), header);
912-
// HdfGroup.DataSetInfo dataSetInfo = new HdfGroup.DataSetInfo(dataset,
913-
// HdfWriteUtils.hdfFixedPointFromValue(0, superblock.getFixedPointDatatypeForOffset()),
914-
// linkNameOffset);
915-
// dataSets.put(linkName.toString(), dataSetInfo);
916-
// }
917-
// } else if (entry.isInternalEntry()) {
918-
// HdfBTree childBTree = entry.getChildBTree();
919-
// collectDatasetsRecursive(childBTree, dataSets, localHeap, fileChannel);
920-
// }
921-
// }
922-
// }
923-
924836
/**
925837
* Retrieves the global heap of the HDF5 file.
926838
*
@@ -931,16 +843,6 @@ public HdfGlobalHeap getGlobalHeap() {
931843
return globalHeap;
932844
}
933845

934-
/**
935-
* Retrieves the file allocation manager of the HDF5 file.
936-
*
937-
* @return the {@link HdfFileAllocation} instance
938-
*/
939-
// @Override
940-
// public HdfFileAllocation getFileAllocation() {
941-
// return fileAllocation;
942-
// }
943-
944846
/**
945847
* Retrieves the seekable byte channel for reading the HDF5 file.
946848
*

src/main/java/org/hdf5javalib/utils/HdfDisplayUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ private static <T> void displayNDimData(SeekableByteChannel fileChannel, HdfData
215215
System.out.println();
216216

217217
Object resultArray = FlattenedArrayUtils.streamToNDArray(dataSource.streamFlattened(), dataSource.getShape(), clazz);
218-
System.out.println(displayType(clazz, resultArray) + " stream = " + displayValue(resultArray));
218+
System.out.println(displayType(clazz, resultArray) + STREAM_EQUALS + displayValue(resultArray));
219219
}
220220

221221
// Method to print flattened array according to shape

0 commit comments

Comments (0)