Skip to content

Commit 5fa89e9

Browse files
committed with message: "Make linter happy"
1 parent 33043f9 commit 5fa89e9

1 file changed

Lines changed: 4 additions & 5 deletions

File tree

tests/integration/test_writes/test_writes.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@
5050
from pyiceberg.schema import Schema
5151
from pyiceberg.table import TableProperties
5252
from pyiceberg.table.sorting import SortDirection, SortField, SortOrder
53-
from pyiceberg.transforms import DayTransform, HourTransform, IdentityTransform, BucketTransform, Transform
53+
from pyiceberg.transforms import BucketTransform, DayTransform, HourTransform, IdentityTransform, Transform
5454
from pyiceberg.types import (
5555
DateType,
5656
DecimalType,
@@ -1848,7 +1848,7 @@ def test_read_write_decimals(session_catalog: Catalog) -> None:
18481848

18491849
@pytest.mark.integration
18501850
@pytest.mark.parametrize("transform", [IdentityTransform(), BucketTransform(32)])
1851-
def test_uuid_partitioning(session_catalog: Catalog, spark: SparkSession, transform: Transform) -> None:
1851+
def test_uuid_partitioning(session_catalog: Catalog, spark: SparkSession, transform: Transform) -> None: # type: ignore
18521852
identifier = f"default.test_uuid_partitioning_{str(transform).replace('[32]', '')}"
18531853

18541854
schema = Schema(NestedField(field_id=1, name="uuid", field_type=UUIDType(), required=True))
@@ -1858,9 +1858,7 @@ def test_uuid_partitioning(session_catalog: Catalog, spark: SparkSession, transf
18581858
except NoSuchTableError:
18591859
pass
18601860

1861-
partition_spec = PartitionSpec(
1862-
PartitionField(source_id=1, field_id=1000, transform=transform, name="uuid_identity")
1863-
)
1861+
partition_spec = PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=transform, name="uuid_identity"))
18641862

18651863
import pyarrow as pa
18661864

@@ -1892,6 +1890,7 @@ def test_uuid_partitioning(session_catalog: Catalog, spark: SparkSession, transf
18921890
rhs = [str(u.as_py()) for u in tbl.scan().to_arrow()["uuid"].combine_chunks()]
18931891
assert lhs == rhs
18941892

1893+
18951894
@pytest.mark.integration
18961895
def test_avro_compression_codecs(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
18971896
identifier = "default.test_avro_compression_codecs"

0 commit comments

Comments (0)