Skip to content

Commit 6a57afc

Browse files
committed
feat: move setup.py to pyproject.toml and update makefile
1 parent 497b1ac commit 6a57afc

8 files changed

Lines changed: 138 additions & 120 deletions

Makefile

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -29,10 +29,10 @@ package:
2929
twine check dist/*
3030

3131
install:
32-
pip install -e .[notebook]
32+
pip install -e ".[notebook]"
3333

3434
install-docs: install ### Installs regular and docs dependencies
35-
pip install -r requirements-docs.txt
35+
pip install -e ".[docs]"
3636

3737
install-spark-ci:
3838
sudo apt-get update

pyproject.toml

Lines changed: 135 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,135 @@
1+
[build-system]
2+
requires = ["setuptools"]
3+
build-backend = "setuptools.build_meta"
4+
5+
[project]
6+
name = "ydata-profiling"
7+
authors = [
8+
{name = "YData Labs Inc", email = "opensource@ydata.ai"},
9+
]
10+
description="Generate profile report for pandas DataFrame"
11+
readme = "README.md"
12+
requires-python=">=3.7, <3.13"
13+
keywords=["pandas", "data-science", "data-analysis", "python", "jupyter", "ipython"]
14+
license = {text = "MIT"}
15+
classifiers=[
16+
"Development Status :: 5 - Production/Stable",
17+
"Topic :: Software Development :: Build Tools",
18+
"License :: OSI Approved :: MIT License",
19+
"Environment :: Console",
20+
"Operating System :: OS Independent",
21+
"Intended Audience :: Science/Research",
22+
"Intended Audience :: Developers",
23+
"Intended Audience :: Financial and Insurance Industry",
24+
"Intended Audience :: Healthcare Industry",
25+
"Topic :: Scientific/Engineering",
26+
"Framework :: IPython",
27+
"Programming Language :: Python :: 3",
28+
"Programming Language :: Python :: 3.7",
29+
"Programming Language :: Python :: 3.8",
30+
"Programming Language :: Python :: 3.9",
31+
"Programming Language :: Python :: 3.10",
32+
"Programming Language :: Python :: 3.11",
33+
"Programming Language :: Python :: 3.12",
34+
]
35+
36+
dependencies = [
37+
"scipy>=1.4.1, <1.14",
38+
"pandas>1.1, <2.0, !=1.4.0",
39+
"matplotlib>=3.5, <=3.10",
40+
"pydantic>=2",
41+
"PyYAML>=5.0.0, <6.1",
42+
"jinja2>=2.11.1, <3.2",
43+
"visions[type_image_path]>=0.7.5, <0.7.7",
44+
"numpy>=1.16.0,<2.2",
45+
# Could be optional
46+
# Related to HTML report
47+
"htmlmin==0.1.12",
48+
# Correlations
49+
"phik>=0.11.1,<0.13",
50+
# Examples
51+
"requests>=2.24.0, <3",
52+
# Progress bar
53+
"tqdm>=4.48.2, <5",
54+
"seaborn>=0.10.1, <0.14",
55+
"multimethod>=1.4, <2",
56+
# metrics
57+
"statsmodels>=0.13.2, <1",
58+
# type checking
59+
"typeguard>=3, <5",
60+
"imagehash==4.3.1",
61+
"wordcloud>=1.9.3",
62+
"dacite>=1.8",
63+
"numba>=0.56.0, <1",
64+
]
65+
66+
dynamic = ["version"]
67+
68+
[project.optional-dependencies]
69+
# dependencies for development and testing
70+
dev = [
71+
"black>=20.8b1",
72+
"isort>=5.0.7",
73+
"pre-commit>=2.8.2",
74+
"virtualenv>=20.0.33",
75+
"twine",
76+
"wheel",
77+
"myst-parser>=0.18.1",
78+
"sphinx_rtd_theme>=0.4.3",
79+
"sphinx-autodoc-typehints>=1.10.3",
80+
"sphinx-multiversion>=0.2.3",
81+
"autodoc_pydantic",
82+
]
83+
# this provides the recommended pyspark and pyarrow versions for spark to work on pandas-profiling
84+
# note that if you are using pyspark 2.3 or 2.4 and pyarrow >= 0.15, you might need to
85+
# set ARROW_PRE_0_15_IPC_FORMAT=1 in your conf/spark-env.sh for toPandas functions to work properly
86+
spark = [
87+
"pyspark>=2.3.0",
88+
"pyarrow>=2.0.0",
89+
"pandas>1.1, <2, !=1.4.0",
90+
"numpy>=1.16.0,<1.24",
91+
"visions[type_image_path]==0.7.5",
92+
]
93+
test = [
94+
"pytest",
95+
"coverage>=6.5, <8",
96+
"codecov",
97+
"pytest-cov",
98+
"pytest-spark",
99+
"nbval",
100+
"pyarrow",
101+
"twine>=3.1.1",
102+
"kaggle",
103+
]
104+
notebook = [
105+
"jupyter>=1.0.0",
106+
"ipywidgets>=7.5.1",
107+
]
108+
docs = [
109+
"mkdocs>=1.6.0,<1.7.0",
110+
"mkdocs-material>=9.0.12,<10.0.0",
111+
"mkdocs-material-extensions>=1.1.1,<2.0.0",
112+
"mkdocs-table-reader-plugin<=2.2.0",
113+
"mike>=2.1.1,<2.2.0",
114+
"mkdocstrings[python]>=0.20.0,<1.0.0",
115+
"mkdocs-badges",
116+
]
117+
unicode= [
118+
"tangled-up-in-unicode==0.2.0",
119+
]
120+
121+
[tool.setuptools.packages.find]
122+
where = ["src"]
123+
124+
[tool.setuptools.package-data]
125+
ydata_profiling = ["py.typed"]
126+
127+
[tool.setuptools]
128+
include-package-data = true
129+
130+
[project.scripts]
131+
ydata_profiling = "ydata_profiling.controller.console:main"
132+
pandas_profiling = "ydata_profiling.controller.console:main"
133+
134+
[project.urls]
135+
homepage = "https://github.com/ydataai/ydata-profiling"

requirements-dev.txt

Lines changed: 0 additions & 11 deletions
This file was deleted.

requirements-docs.txt

Lines changed: 0 additions & 7 deletions
This file was deleted.

requirements-spark.txt

Lines changed: 0 additions & 9 deletions
This file was deleted.

requirements-test.txt

Lines changed: 0 additions & 9 deletions
This file was deleted.

requirements.txt

Lines changed: 0 additions & 27 deletions
This file was deleted.

setup.py

Lines changed: 1 addition & 55 deletions
Original file line number | Diff line number | Diff line change
@@ -1,16 +1,12 @@
11
from pathlib import Path
22

3-
from setuptools import find_packages, setup
3+
from setuptools import setup
44

55
# Read the contents of README file
66
source_root = Path(".")
77
with (source_root / "README.md").open(encoding="utf-8") as f:
88
long_description = f.read()
99

10-
# Read the requirements
11-
with (source_root / "requirements.txt").open(encoding="utf8") as f:
12-
requirements = f.readlines()
13-
1410
try:
1511
version = (source_root / "VERSION").read_text().rstrip("\n")
1612
except FileNotFoundError:
@@ -20,58 +16,8 @@
2016
version_file.write(f"__version__ = '{version}'")
2117

2218
setup(
23-
name="ydata-profiling",
2419
version=version,
25-
author="YData Labs Inc",
26-
author_email="opensource@ydata.ai",
27-
packages=find_packages("src"),
28-
package_dir={"": "src"},
29-
url="https://github.com/ydataai/ydata-profiling",
30-
license="MIT",
31-
description="Generate profile report for pandas DataFrame",
32-
python_requires=">=3.7, <3.13",
33-
install_requires=requirements,
34-
extras_require={
35-
"notebook": [
36-
"jupyter>=1.0.0",
37-
"ipywidgets>=7.5.1",
38-
],
39-
"unicode": [
40-
"tangled-up-in-unicode==0.2.0",
41-
],
42-
},
43-
package_data={
44-
"ydata_profiling": ["py.typed"],
45-
},
46-
include_package_data=True,
47-
classifiers=[
48-
"Development Status :: 5 - Production/Stable",
49-
"Topic :: Software Development :: Build Tools",
50-
"License :: OSI Approved :: MIT License",
51-
"Environment :: Console",
52-
"Operating System :: OS Independent",
53-
"Intended Audience :: Science/Research",
54-
"Intended Audience :: Developers",
55-
"Intended Audience :: Financial and Insurance Industry",
56-
"Intended Audience :: Healthcare Industry",
57-
"Topic :: Scientific/Engineering",
58-
"Framework :: IPython",
59-
"Programming Language :: Python :: 3",
60-
"Programming Language :: Python :: 3.7",
61-
"Programming Language :: Python :: 3.8",
62-
"Programming Language :: Python :: 3.9",
63-
"Programming Language :: Python :: 3.10",
64-
"Programming Language :: Python :: 3.11",
65-
"Programming Language :: Python :: 3.12",
66-
],
67-
keywords="pandas data-science data-analysis python jupyter ipython",
6820
long_description=long_description,
6921
long_description_content_type="text/markdown",
70-
entry_points={
71-
"console_scripts": [
72-
"ydata_profiling = ydata_profiling.controller.console:main",
73-
"pandas_profiling = ydata_profiling.controller.console:main",
74-
]
75-
},
7622
options={"bdist_wheel": {"universal": True}},
7723
)

0 commit comments

Comments (0)