Skip to content

Commit f7fe286

Browse files
committed
SonarQube cleanup.
1 parent 024ff12 commit f7fe286

6 files changed

Lines changed: 19 additions & 140 deletions

File tree

src/main/java/org/hdf5javalib/datatype/ReferenceDatatype.java

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -100,12 +100,6 @@ public static byte createClassAndVersion() {
100100
return (byte) (7 << 4);
101101
}
102102

103-
// public static int getTypeValue(BitSet classBitField) {
104-
// int value = 0;
105-
// for (int i = 0; i < 4; i++) if (classBitField.get(i)) value |= 1 << i;
106-
// return value;
107-
// }
108-
//
109103
public static ReferenceType getReferenceType(BitSet classBitField) {
110104
return ReferenceType.fromValue(getTypeValue(classBitField));
111105
}
@@ -151,10 +145,6 @@ public static int getTypeValue(BitSet classBitField) {
151145
}
152146

153147
public String toString(byte[] bytes) throws InvocationTargetException, InstantiationException, IllegalAccessException, IOException {
154-
// if (bytes.length != size) throw new IllegalArgumentException("Byte array length mismatch");
155-
// StringBuilder sb = new StringBuilder();
156-
// for (byte b : bytes) sb.append(String.format("%02X", b));
157-
// return "Reference[" + getReferenceType(classBitField).description + "]=" + sb;
158148
HdfReferenceInstance referenceInstance = getInstance(HdfReferenceInstance.class, bytes);
159149
HdfDataHolder data = referenceInstance.getData();
160150
if ( data.isScalar()) {

src/main/java/org/hdf5javalib/examples/FileByteComparator.java

Lines changed: 9 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -58,29 +58,25 @@ public static void main(String[] args) {
5858
if (file1Bytes[i] != file2Bytes[i]) {
5959
differencesFound = true;
6060
// Print offset and byte values in hex
61-
log.info(String.format("0x%-8X 0x%02X 0x%02X",
62-
i,
63-
file1Bytes[i] & 0xFF,
64-
file2Bytes[i] & 0xFF));
61+
log.info("0x{:08X} 0x{:02X} 0x{:02X}", i, file1Bytes[i] & 0xFF, file2Bytes[i] & 0xFF);
6562
}
6663
}
6764

68-
// Check for length differences
65+
// Check for length differences
6966
if (file1Bytes.length != file2Bytes.length) {
7067
differencesFound = true;
71-
log.info(String.format("Files differ in length: File1 = %d bytes, File2 = %d bytes",
72-
file1Bytes.length, file2Bytes.length));
68+
log.info("Files differ in length: File1 = {} bytes, File2 = {}", file1Bytes.length, file2Bytes.length);
69+
String offset = Integer.toHexString(minLength - 1).toUpperCase();
70+
7371
if (file1Bytes.length > file2Bytes.length) {
74-
log.info("Extra bytes in File1 after offset 0x" +
75-
Integer.toHexString(minLength - 1).toUpperCase() + ":");
72+
log.info("Extra bytes in File1 after offset 0x{}:", offset);
7673
for (int i = minLength; i < file1Bytes.length; i++) {
77-
log.info(String.format("0x%-8X 0x%02X", i, file1Bytes[i] & 0xFF));
74+
log.info("0x{:08X} 0x{:02X}", i, file1Bytes[i] & 0xFF);
7875
}
7976
} else {
80-
log.info("Extra bytes in File2 after offset 0x" +
81-
Integer.toHexString(minLength - 1).toUpperCase() + ":");
77+
log.info("Extra bytes in File2 after offset 0x{}:", offset);
8278
for (int i = minLength; i < file2Bytes.length; i++) {
83-
log.info(String.format("0x%-8X 0x%02X", i, file2Bytes[i] & 0xFF));
79+
log.info("0x{:08X} 0x{:02X}", i, file2Bytes[i] & 0xFF);
8480
}
8581
}
8682
}

src/main/java/org/hdf5javalib/examples/HDF5Examples/ArrayAttributeRead.java

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@
44
import org.hdf5javalib.hdfjava.HdfDataset;
55
import org.hdf5javalib.hdfjava.HdfFileReader;
66

7+
import java.io.IOException;
8+
import java.lang.reflect.InvocationTargetException;
79
import java.nio.channels.SeekableByteChannel;
810
import java.nio.file.Files;
911
import java.nio.file.Path;
@@ -37,17 +39,15 @@ public static void main(String[] args) {
3739
* Executes the main logic of reading and displaying compound data from an HDF5 file.
3840
*/
3941
private void run() {
40-
try {
41-
Path filePath = getResourcePath("HDF5Examples/h5ex_t_arrayatt.h5");
42-
try (SeekableByteChannel channel = Files.newByteChannel(filePath, StandardOpenOption.READ)) {
43-
HdfFileReader reader = new HdfFileReader(channel).readFile();
44-
log.debug("BTree: {} ", reader.getBTree().getRoot());
45-
for (HdfDataset dataSet : reader.getDatasets()) {
46-
displayAttributes(dataSet);
47-
}
42+
Path filePath = getResourcePath("HDF5Examples/h5ex_t_arrayatt.h5");
43+
try (SeekableByteChannel channel = Files.newByteChannel(filePath, StandardOpenOption.READ)) {
44+
HdfFileReader reader = new HdfFileReader(channel).readFile();
45+
log.debug("BTree: {} ", reader.getBTree().getRoot());
46+
for (HdfDataset dataSet : reader.getDatasets()) {
47+
displayAttributes(dataSet);
4848
}
4949
} catch (Exception e) {
50-
throw new RuntimeException(e);
50+
throw new IllegalArgumentException(e);
5151
}
5252
}
5353
}

src/main/java/org/hdf5javalib/examples/read/CompoundRead.java

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -105,24 +105,6 @@ public int getValue() {
105105
return value;
106106
}
107107
}
108-
109-
// // Canonical constructor for validation
110-
// public Record {
111-
// if (string == null || string.length() > 16) {
112-
// throw new IllegalArgumentException("string must be non-null and at most 16 characters");
113-
// }
114-
// // Pad string to 16 chars with NULs for HDF5
115-
// string = string.length() < 16 ? string + "\0".repeat(16 - string.length()) : string;
116-
// if (opaque == null || opaque.length != 4) {
117-
// throw new IllegalArgumentException("opaque must be a 4-byte array");
118-
// }
119-
// if (array == null || array.length != 3) {
120-
// throw new IllegalArgumentException("array must be a 3-element int array");
121-
// }
122-
// if (variableLength == null) {
123-
// throw new IllegalArgumentException("variableLength must be non-null");
124-
// }
125-
// }
126108
}
127109

128110
/**

src/main/java/org/hdf5javalib/examples/read/DimensionsRead.java

Lines changed: 1 addition & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -38,24 +38,7 @@ public static void main(String[] args) {
3838
*/
3939
private void run() {
4040
try {
41-
Path filePath = getResourcePath("dimensions.h5");
42-
// try (SeekableByteChannel channel = Files.newByteChannel(filePath, StandardOpenOption.READ)) {
43-
// HdfFileReader reader = new HdfFileReader(channel).readFile();
44-
// log.debug("Root Group: {} ", reader.getRootGroup());
45-
// try (HdfDataset ds = reader.getRootGroup().getDataset("/scalar_dataset").orElseThrow()) {
46-
// displayScalarData(channel, ds, HdfFloatPoint.class, reader);
47-
// }
48-
// try (HdfDataset ds = reader.getRootGroup().getDataset("/1d_dataset").orElseThrow()) {
49-
// displayVectorData(channel, ds, HdfFloatPoint.class, reader);
50-
// }
51-
// try (HdfDataset ds = reader.getRootGroup().getDataset("/2d_dataset").orElseThrow()) {
52-
// displayMatrixData(channel, ds, HdfFloatPoint.class, reader);
53-
// }
54-
// try (HdfDataset ds = reader.getRootGroup().getDataset("/2d_dataset_permuted").orElseThrow()) {
55-
// displayMatrixData(channel, ds, HdfFloatPoint.class, reader);
56-
// }
57-
// }
58-
filePath = getResourcePath("array_datasets.h5");
41+
Path filePath = getResourcePath("array_datasets.h5");
5942
try (SeekableByteChannel channel = Files.newByteChannel(filePath, StandardOpenOption.READ)) {
6043
HdfFileReader reader = new HdfFileReader(channel).readFile();
6144
log.debug("File BTree: {} ", reader.getBTree());

src/main/java/org/hdf5javalib/examples/read/FixedPointRead.java

Lines changed: 0 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@
2222
import java.util.Comparator;
2323
import java.util.List;
2424
import java.util.stream.Collectors;
25-
import java.util.stream.IntStream;
2625
import java.util.stream.Stream;
2726

2827
import static org.hdf5javalib.utils.HdfReadUtils.getResourcePath;
@@ -97,28 +96,6 @@ void run() throws Exception {
9796
}
9897
}
9998

100-
/**
101-
* Processes a scalar dataset using a TypedDataSource.
102-
*
103-
* @param channel the file channel for reading the HDF5 file
104-
* @param hdfDataFile the HDF5 file context
105-
* @param dataSet the scalar dataset to process
106-
* @throws IOException if an I/O error occurs
107-
*/
108-
void tryDataSpliterator(SeekableByteChannel channel, HdfDataFile hdfDataFile, HdfDataset dataSet) throws IOException, InvocationTargetException, InstantiationException, IllegalAccessException {
109-
TypedDataSource<BigInteger> dataSource = new TypedDataSource<>(channel, hdfDataFile, dataSet, BigInteger.class);
110-
BigInteger allData = dataSource.readScalar();
111-
log.info("Scalar dataset name = {}", dataSet.getObjectName());
112-
log.info("Scalar readAll stats = {}", Stream.of(allData)
113-
.collect(Collectors.summarizingInt(BigInteger::intValue)));
114-
log.info("Scalar streaming list = {}", dataSource.streamScalar().toList());
115-
log.info("Scalar parallelStreaming list = {}", dataSource.parallelStreamScalar().toList());
116-
117-
new TypedDataSource<>(channel, hdfDataFile, dataSet, HdfFixedPoint.class).streamScalar().forEach(item -> log.info("{}", item));
118-
new TypedDataSource<>(channel, hdfDataFile, dataSet, String.class).streamScalar().forEach(item -> log.info("{}", item));
119-
new TypedDataSource<>(channel, hdfDataFile, dataSet, BigDecimal.class).streamScalar().forEach(item -> log.info("{}", item));
120-
}
121-
12299
/**
123100
* Processes a scalar dataset using a TypedDataSource.
124101
*
@@ -141,31 +118,6 @@ void tryScalarDataSpliterator(SeekableByteChannel channel, HdfDataFile hdfDataFi
141118
new TypedDataSource<>(channel, hdfDataFile, dataSet, BigDecimal.class).streamScalar().forEach(item -> log.info("{}", item));
142119
}
143120

144-
/**
145-
* Processes a vector dataset using a TypedDataSource.
146-
*
147-
* @param fileChannel the file channel for reading the HDF5 file
148-
* @param hdfDataFile the HDF5 file context
149-
* @param dataSet the vector dataset to process
150-
* @throws IOException if an I/O error occurs
151-
*/
152-
void tryVectorSpliterator(SeekableByteChannel fileChannel, HdfDataFile hdfDataFile, HdfDataset dataSet) throws IOException, InvocationTargetException, InstantiationException, IllegalAccessException {
153-
TypedDataSource<BigInteger> dataSource = new TypedDataSource<>(fileChannel, hdfDataFile, dataSet, BigInteger.class);
154-
BigInteger[] allData = dataSource.readVector();
155-
log.info("Vector readAll stats = {}", Arrays.stream(allData).collect(Collectors.summarizingInt(BigInteger::intValue)));
156-
log.info("Vector streaming stats = {}", dataSource.streamVector()
157-
.collect(Collectors.summarizingInt(BigInteger::intValue)));
158-
log.info("Vector parallel streaming stats = {}", dataSource.parallelStreamVector()
159-
.collect(Collectors.summarizingInt(BigInteger::intValue)));
160-
final BigInteger[] flattenedData = dataSource.readFlattened();
161-
int[] shape = dataSource.getShape();
162-
log.info("Vector flattenedData stats = {}", IntStream.rangeClosed(0, FlattenedArrayUtils.totalSize(shape) - 1)
163-
.mapToObj(i -> FlattenedArrayUtils.getElement(flattenedData, shape, i))
164-
.collect(Collectors.summarizingInt(BigInteger::intValue)));
165-
BigInteger bdReduced = (BigInteger) FlattenedArrayUtils.reduceAlongAxis(dataSource.streamFlattened(), shape, 0, BigInteger::max, BigInteger.class);
166-
log.info("FlattenedData Streamed Reduced = {}", bdReduced);
167-
}
168-
169121
/**
170122
* Processes a matrix dataset using a TypedDataSource.
171123
*
@@ -282,28 +234,4 @@ void display4DData(SeekableByteChannel fileChannel, HdfDataFile hdfDataFile, Hdf
282234
pieces.forEach(entry -> log.info("Coords {} → Value: {}", Arrays.toString(entry.coordinates), entry.value));
283235
}
284236

285-
/**
286-
* Processes a 4D dataset using a TypedDataSource, demonstrating slicing and filtering.
287-
*
288-
* @param fileChannel the file channel for reading the HDF5 file
289-
* @param hdfDataFile the HDF5 file context
290-
* @param dataSet the 4D dataset to process
291-
* @throws IOException if an I/O error occurs
292-
*/
293-
void displaySalesCube(SeekableByteChannel fileChannel, HdfDataFile hdfDataFile, HdfDataset dataSet) throws IOException, InvocationTargetException, InstantiationException, IllegalAccessException {
294-
TypedDataSource<Double> dataSource = new TypedDataSource<>(fileChannel, hdfDataFile, dataSet, Double.class);
295-
int[] shape = dataSource.getShape(); // Should be [60, 100, 50]
296-
Double[][] sales2024Jan = (Double[][]) FlattenedArrayUtils.sliceStream(
297-
dataSource.streamFlattened(),
298-
shape,
299-
new int[][]{{0}, {}, {}}, // Slice Time=2024-01 (index 0)
300-
Double.class
301-
);
302-
log.info("Sales for January 2024:");
303-
for (int z = 0; z < shape[1]; z++) {
304-
for (int p = 0; p < shape[2]; p++) {
305-
log.info(String.format("Zip %d, Product %d: %.2f", z, p, sales2024Jan[z][p]));
306-
}
307-
}
308-
}
309237
}

0 commit comments

Comments (0)