Skip to content

Commit cc06390

Browse files
committed
Cleanup
1 parent e5d13f9 commit cc06390

2 files changed

Lines changed: 18 additions & 8 deletions

File tree

pyiceberg/manifest.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -705,6 +705,7 @@ def fetch_manifest_entry(self, io: FileIO, discard_deleted: bool = True) -> List
705705
An Iterator of manifest entries.
706706
"""
707707
from pyiceberg_core import manifest
708+
708709
bs = io.new_input(self.manifest_path).open().read()
709710
manifest = manifest.read_manifest_entries(bs)
710711

@@ -747,7 +748,6 @@ def _convert_entry(entry: Any) -> ManifestEntry:
747748
]
748749

749750

750-
751751
@cached(cache=LRUCache(maxsize=128), key=lambda io, manifest_list: hashkey(manifest_list))
752752
def _manifests(io: FileIO, manifest_list: str) -> Tuple[ManifestFile, ...]:
753753
"""Read and cache manifests from the given manifest list, returning a tuple to prevent modification."""

tests/conftest.py

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
and the built-in pytest fixture request should be used as an additional argument in the function. The fixture can then be
2525
retrieved using `request.getfixturevalue(fixture_name)`.
2626
"""
27-
import json
27+
2828
import os
2929
import re
3030
import socket
@@ -61,7 +61,7 @@
6161
)
6262
from pyiceberg.io.fsspec import FsspecFileIO
6363
from pyiceberg.manifest import DataFile, FileFormat
64-
from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec, PartitionField
64+
from pyiceberg.partitioning import PartitionField, PartitionSpec
6565
from pyiceberg.schema import Accessor, Schema
6666
from pyiceberg.serializers import ToOutputFile
6767
from pyiceberg.table import FileScanTask, Table
@@ -1850,9 +1850,8 @@ def simple_map() -> MapType:
18501850

18511851
@pytest.fixture(scope="session")
18521852
def test_schema() -> Schema:
1853-
return Schema(
1854-
NestedField(1, "VendorID", IntegerType(), False), NestedField(2, "tpep_pickup_datetime", IntegerType(), False)
1855-
)
1853+
return Schema(NestedField(1, "VendorID", IntegerType(), False), NestedField(2, "tpep_pickup_datetime", IntegerType(), False))
1854+
18561855

18571856
@pytest.fixture(scope="session")
18581857
def test_partition_spec() -> Schema:
@@ -1861,16 +1860,27 @@ def test_partition_spec() -> Schema:
18611860
PartitionField(2, 1001, IdentityTransform(), "tpep_pickup_datetime"),
18621861
)
18631862

1863+
18641864
@pytest.fixture(scope="session")
1865-
def generated_manifest_entry_file(avro_schema_manifest_entry: Dict[str, Any], test_schema: Schema, test_partition_spec: PartitionSpec) -> Generator[str, None, None]:
1865+
def generated_manifest_entry_file(
1866+
avro_schema_manifest_entry: Dict[str, Any], test_schema: Schema, test_partition_spec: PartitionSpec
1867+
) -> Generator[str, None, None]:
18661868
from fastavro import parse_schema, writer
18671869

18681870
parsed_schema = parse_schema(avro_schema_manifest_entry)
18691871

18701872
with TemporaryDirectory() as tmpdir:
18711873
tmp_avro_file = tmpdir + "/manifest.avro"
18721874
with open(tmp_avro_file, "wb") as out:
1873-
writer(out, parsed_schema, manifest_entry_records, metadata={'schema': test_schema.model_dump_json(), 'partition-spec': test_partition_spec.fields,})
1875+
writer(
1876+
out,
1877+
parsed_schema,
1878+
manifest_entry_records,
1879+
metadata={
1880+
"schema": test_schema.model_dump_json(),
1881+
"partition-spec": test_partition_spec.fields,
1882+
},
1883+
)
18741884
yield tmp_avro_file
18751885

18761886

0 commit comments

Comments (0)