Skip to content

Commit ef4e81e

Browse files
authored
Merge branch 'main' into test-consolidation
2 parents c69e0b5 + e522567 commit ef4e81e

30 files changed

Lines changed: 1713 additions & 194 deletions

.github/workflows/pypi-build-artifacts.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ jobs:
4545
3.10
4646
3.11
4747
3.12
48+
3.13
4849
4950
- name: Install UV
5051
uses: astral-sh/setup-uv@v7
@@ -61,14 +62,14 @@ jobs:
6162
if: startsWith(matrix.os, 'ubuntu')
6263

6364
- name: Build wheels
64-
uses: pypa/cibuildwheel@v3.3.0
65+
uses: pypa/cibuildwheel@v3.3.1
6566
with:
6667
output-dir: wheelhouse
6768
config-file: "pyproject.toml"
6869
env:
6970
# Ignore 32 bit architectures
7071
CIBW_ARCHS: "auto64"
71-
CIBW_PROJECT_REQUIRES_PYTHON: ">=3.10"
72+
CIBW_PROJECT_REQUIRES_PYTHON: ">=3.10,<3.14"
7273
CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==5.0.1"
7374
CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py"
7475
# Ignore tests for pypy since not all dependencies are compiled for it

.github/workflows/python-ci.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ jobs:
4747
runs-on: ubuntu-latest
4848
strategy:
4949
matrix:
50-
python: ['3.10', '3.11', '3.12']
50+
python: ['3.10', '3.11', '3.12', '3.13']
5151

5252
steps:
5353
- uses: actions/checkout@v6
@@ -71,7 +71,7 @@ jobs:
7171
runs-on: ubuntu-latest
7272
strategy:
7373
matrix:
74-
python: ['3.10', '3.11', '3.12']
74+
python: ['3.10', '3.11', '3.12', '3.13']
7575

7676
steps:
7777
- uses: actions/checkout@v6

.github/workflows/svn-build-artifacts.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ jobs:
4545
3.10
4646
3.11
4747
3.12
48+
3.13
4849
4950
- name: Install UV
5051
uses: astral-sh/setup-uv@v7
@@ -56,14 +57,14 @@ jobs:
5657
if: startsWith(matrix.os, 'ubuntu')
5758

5859
- name: Build wheels
59-
uses: pypa/cibuildwheel@v3.3.0
60+
uses: pypa/cibuildwheel@v3.3.1
6061
with:
6162
output-dir: wheelhouse
6263
config-file: "pyproject.toml"
6364
env:
6465
# Ignore 32 bit architectures
6566
CIBW_ARCHS: "auto64"
66-
CIBW_PROJECT_REQUIRES_PYTHON: ">=3.10"
67+
CIBW_PROJECT_REQUIRES_PYTHON: ">=3.10,<3.14"
6768
CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==5.0.1"
6869
CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py"
6970
# Ignore tests for pypy since not all dependencies are compiled for it

Makefile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ setup-venv: ## Create virtual environment
7070
uv venv $(PYTHON_ARG)
7171

7272
install-dependencies: setup-venv ## Install all dependencies including extras
73-
uv sync $(PYTHON_ARG) --all-extras
73+
uv sync $(PYTHON_ARG) --all-extras --reinstall
7474

7575
install: install-uv install-dependencies ## Install uv and dependencies
7676

@@ -100,7 +100,7 @@ test-integration: test-integration-setup test-integration-exec test-integration-
100100
test-integration-setup: ## Start Docker services for integration tests
101101
docker compose -f dev/docker-compose-integration.yml kill
102102
docker compose -f dev/docker-compose-integration.yml rm -f
103-
docker compose -f dev/docker-compose-integration.yml up -d --wait
103+
docker compose -f dev/docker-compose-integration.yml up -d --build --wait
104104
uv run $(PYTHON_ARG) python dev/provision.py
105105

106106
test-integration-exec: ## Run integration tests (excluding provision)
@@ -148,7 +148,7 @@ docs-install: ## Install docs dependencies (included in default groups)
148148
uv sync $(PYTHON_ARG) --group docs
149149

150150
docs-serve: ## Serve local docs preview (hot reload)
151-
uv run $(PYTHON_ARG) mkdocs serve -f mkdocs/mkdocs.yml
151+
uv run $(PYTHON_ARG) mkdocs serve -f mkdocs/mkdocs.yml --livereload
152152

153153
docs-build: ## Build the static documentation site
154154
uv run $(PYTHON_ARG) mkdocs build -f mkdocs/mkdocs.yml --strict

dev/spark/Dockerfile

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,8 @@ ARG BASE_IMAGE_SPARK_VERSION=4.0.1
1818
FROM apache/spark:${BASE_IMAGE_SPARK_VERSION}
1919

2020
# Dependency versions - keep these compatible
21-
ARG ICEBERG_VERSION=1.10.0
21+
ARG ICEBERG_VERSION=1.10.1
2222
ARG ICEBERG_SPARK_RUNTIME_VERSION=4.0_2.13
23-
ARG SPARK_VERSION=4.0.1
2423
ARG HADOOP_VERSION=3.4.1
2524
ARG SCALA_VERSION=2.13
2625
ARG AWS_SDK_VERSION=2.24.6
@@ -43,7 +42,6 @@ RUN mkdir -p /home/iceberg/spark-events && \
4342

4443
# Required JAR dependencies
4544
ENV JARS_TO_DOWNLOAD="\
46-
org/apache/spark/spark-connect_${SCALA_VERSION}/${SPARK_VERSION}/spark-connect_${SCALA_VERSION}-${SPARK_VERSION}.jar \
4745
org/apache/iceberg/iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}/${ICEBERG_VERSION}/iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar \
4846
org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar \
4947
org/apache/hadoop/hadoop-aws/${HADOOP_VERSION}/hadoop-aws-${HADOOP_VERSION}.jar \

mkdocs/docs/contributing.md

Lines changed: 22 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ make lint
118118

119119
In addition to manually running `make lint`, you can install the pre-commit hooks in your local repo with `prek install`. By doing this, linting is run automatically every time you make a commit.
120120

121-
You can bump the integrations to the latest version using `prek auto-update`. This will check if there is a newer version of `{black,mypy,isort,...}` and update the yaml.
121+
You can bump the integrations to the latest version using `prek auto-update`. This will check if there is a newer version of `{ruff,mypy,...}` and update the yaml.
122122

123123
## Cleaning
124124

@@ -258,6 +258,27 @@ Which will warn:
258258
Deprecated in 0.1.0, will be removed in 0.2.0. The old_property is deprecated. Please use the something_else property instead.
259259
```
260260

261+
### Logging
262+
263+
PyIceberg uses Python's standard logging module. You can control the logging level using either:
264+
265+
**CLI option:**
266+
267+
```bash
268+
pyiceberg --log-level DEBUG describe my_table
269+
```
270+
271+
**Environment variable:**
272+
273+
```bash
274+
export PYICEBERG_LOG_LEVEL=DEBUG
275+
pyiceberg describe my_table
276+
```
277+
278+
Valid log levels are: `DEBUG`, `INFO`, `WARNING` (default), `ERROR`, `CRITICAL`.
279+
280+
Debug logging is particularly useful for troubleshooting issues with FileIO implementations, catalog connections, and other integration points.
281+
261282
### Type annotations
262283

263284
For the type annotation the types from the `Typing` package are used.

pyiceberg/catalog/__init__.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -285,8 +285,8 @@ def delete_files(io: FileIO, files_to_delete: set[str], file_type: str) -> None:
285285
for file in files_to_delete:
286286
try:
287287
io.delete(file)
288-
except OSError as exc:
289-
logger.warning(msg=f"Failed to delete {file_type} file {file}", exc_info=exc)
288+
except OSError:
289+
logger.warning(f"Failed to delete {file_type} file {file}", exc_info=logger.isEnabledFor(logging.DEBUG))
290290

291291

292292
def delete_data_files(io: FileIO, manifests_to_delete: list[ManifestFile]) -> None:
@@ -305,8 +305,8 @@ def delete_data_files(io: FileIO, manifests_to_delete: list[ManifestFile]) -> No
305305
if not deleted_files.get(path, False):
306306
try:
307307
io.delete(path)
308-
except OSError as exc:
309-
logger.warning(msg=f"Failed to delete data file {path}", exc_info=exc)
308+
except OSError:
309+
logger.warning(f"Failed to delete data file {path}", exc_info=logger.isEnabledFor(logging.DEBUG))
310310
deleted_files[path] = True
311311

312312

@@ -319,8 +319,8 @@ def _import_catalog(name: str, catalog_impl: str, properties: Properties) -> Cat
319319
module = importlib.import_module(module_name)
320320
class_ = getattr(module, class_name)
321321
return class_(name, **properties)
322-
except ModuleNotFoundError as exc:
323-
logger.warning(f"Could not initialize Catalog: {catalog_impl}", exc_info=exc)
322+
except ModuleNotFoundError:
323+
logger.warning(f"Could not initialize Catalog: {catalog_impl}", exc_info=logger.isEnabledFor(logging.DEBUG))
324324
return None
325325

326326

0 commit comments

Comments (0)