Remove hardcoded libpython binaries and add debug step
All checks were successful
build / build-linux (push) Successful in 16s

This commit is contained in:
kdusek
2025-12-07 23:15:18 +01:00
parent 308ce7768e
commit 6a1fe63684
1807 changed files with 172293 additions and 1 deletion

View File

@@ -0,0 +1,13 @@
import locale
import sys

import pytest

__all__ = ['fail_on_ascii']

# ``locale.getencoding`` only exists on Python 3.11+; older interpreters
# use the historical ``getpreferredencoding`` API instead.
locale_encoding = (
    locale.getencoding()
    if sys.version_info >= (3, 11)
    else locale.getpreferredencoding(False)
)

# "ANSI_X3.4-1968" is the canonical name glibc reports for a plain ASCII locale.
is_ascii = locale_encoding == 'ANSI_X3.4-1968'

# Marker for tests that are known to break under an ASCII-only locale.
fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale")

View File

@@ -0,0 +1,3 @@
# ``test.support.os_helper`` moved between CPython versions; jaraco.test
# papers over the difference. Passing 'can_symlink' forces the fallback to
# the legacy flat ``test.support`` namespace when ``os_helper`` is absent.
from jaraco.test.cpython import from_test_support, try_import
os_helper = try_import('os_helper') or from_test_support('can_symlink')

View File

@@ -0,0 +1,59 @@
from __future__ import annotations
import re
import time
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import urlopen
__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]

# URL fragments stripped out when deriving a local cache-file name.
NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
# Downloaded examples are cached next to this module.
DOWNLOAD_DIR = Path(__file__).parent
# ----------------------------------------------------------------------
# Please update ./preload.py accordingly when modifying this file
# ----------------------------------------------------------------------
def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
    """Derive a deterministic local file path for *url* inside *download_dir*."""
    name = url.strip()
    for noise in NAME_REMOVE:
        # Drop URL boilerplate, re-trimming stray separators after each pass.
        name = name.replace(noise, '').strip().strip('/:').strip()
    # Collapse any remaining unsafe characters into underscores.
    safe_name = re.sub(r"[^\-_\.\w\d]+", "_", name)
    return Path(download_dir, safe_name)
def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
    """Download *url* into *download_dir* unless already cached; return the path.

    Retries once after *wait* seconds when the server answers with an HTTP error.
    """
    path = output_file(url, download_dir)
    if not path.exists():
        download_dir.mkdir(exist_ok=True, parents=True)
        print(f"Downloading {url} to {path}")
        try:
            download(url, path)
        except HTTPError:
            # Likely transient (e.g. rate limiting): pause, then retry once.
            time.sleep(wait)
            download(url, path)
    else:
        print(f"Skipping {url} (already exists: {path})")
    return path
def urls_from_file(list_file: Path) -> list[str]:
    """``list_file`` should be a text file where each line corresponds to a URL to
    download.
    """
    print(f"file: {list_file}")
    lines = list_file.read_text(encoding="utf-8").splitlines()
    # Lines beginning with '#' are comments; everything else is a URL.
    return [line for line in lines if not line.startswith("#")]
def download(url: str, dest: Path):
    """Fetch *url* and write its raw bytes to *dest*."""
    with urlopen(url) as response:
        payload = response.read()
    with open(dest, "wb") as out:
        out.write(payload)
    # Defensive sanity check that the file actually landed on disk.
    assert Path(dest).exists()

View File

@@ -0,0 +1,18 @@
"""This file can be used to preload files needed for testing.
For example you can use::
cd setuptools/tests/config
python -m downloads.preload setupcfg_examples.txt
to make sure the `setup.cfg` examples are downloaded before starting the tests.
"""
import sys
from pathlib import Path
from . import retrieve_file, urls_from_file
if __name__ == "__main__":
    # First CLI argument: text file listing one URL per line (see module docstring).
    for url in urls_from_file(Path(sys.argv[1])):
        retrieve_file(url)

View File

@@ -0,0 +1,22 @@
# ====================================================================
# Some popular packages that use setup.cfg (and others not so popular)
# Reference: https://hugovk.github.io/top-pypi-packages/
# ====================================================================
https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg
https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg
https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg
https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg
https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg
https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg
https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg
https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg
https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg
https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg
https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg
https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg
https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg
https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg
https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg
https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg
https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg
https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg

View File

@@ -0,0 +1,772 @@
"""Make sure that applying the configuration from pyproject.toml is equivalent to
applying a similar configuration from setup.cfg
To run these tests offline, please have a look on ``./downloads/preload.py``
"""
from __future__ import annotations
import io
import re
import tarfile
from inspect import cleandoc
from pathlib import Path
from unittest.mock import Mock
import pytest
from ini2toml.api import LiteTranslator
from packaging.metadata import Metadata
import setuptools # noqa: F401 # ensure monkey patch to metadata
from setuptools._static import is_static
from setuptools.command.egg_info import write_requirements
from setuptools.config import expand, pyprojecttoml, setupcfg
from setuptools.config._apply_pyprojecttoml import _MissingDynamic, _some_attrgetter
from setuptools.dist import Distribution
from setuptools.errors import InvalidConfigError, RemovedConfigError
from setuptools.warnings import InformationOnly, SetuptoolsDeprecationWarning
from .downloads import retrieve_file, urls_from_file
HERE = Path(__file__).parent
EXAMPLES_FILE = "setupcfg_examples.txt"
def makedist(path, **attrs):
    """Create a ``Distribution`` rooted at *path*, forwarding extra attributes."""
    options = {"src_root": path}
    options.update(attrs)
    return Distribution(options)
def _mock_expand_patterns(patterns, *_, **__):
"""
Allow comparing the given patterns for 2 dist objects.
We need to strip special chars to avoid errors when validating.
"""
return [re.sub("[^a-z0-9]+", "", p, flags=re.I) or "empty" for p in patterns]
@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE))
@pytest.mark.filterwarnings("ignore")
@pytest.mark.uses_network
def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path):
    """Translating a real-world ``setup.cfg`` to TOML must configure the
    distribution the same way as applying the original ``setup.cfg``.
    """
    # Stub out dynamic attribute/pattern expansion so no downloaded project
    # code has to be imported or executed.
    monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1"))
    monkeypatch.setattr(
        Distribution, "_expand_patterns", Mock(side_effect=_mock_expand_patterns)
    )
    setupcfg_example = retrieve_file(url)
    pyproject_example = Path(tmp_path, "pyproject.toml")
    setupcfg_text = setupcfg_example.read_text(encoding="utf-8")
    # Mechanical setup.cfg -> pyproject.toml translation via ini2toml.
    toml_config = LiteTranslator().translate(setupcfg_text, "setup.cfg")
    pyproject_example.write_text(toml_config, encoding="utf-8")
    dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example)
    dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example)
    # Core metadata (normalised by ``core_metadata``) must match exactly.
    pkg_info_toml = core_metadata(dist_toml)
    pkg_info_cfg = core_metadata(dist_cfg)
    assert pkg_info_toml == pkg_info_cfg
    if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)):
        assert set(dist_toml.license_files) == set(dist_cfg.license_files)
    if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)):
        print(dist_cfg.entry_points)
        # Compare entry points ignoring whitespace and ordering differences.
        ep_toml = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_toml.entry_points.items()
        }
        ep_cfg = {
            (k, *sorted(i.replace(" ", "") for i in v))
            for k, v in dist_cfg.entry_points.items()
        }
        assert ep_toml == ep_cfg
    if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)):
        pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()}
        pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()}
        assert pkg_data_toml == pkg_data_cfg
    if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)):
        data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files}
        data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files}
        assert data_files_toml == data_files_cfg
    assert set(dist_toml.install_requires) == set(dist_cfg.install_requires)
    if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)):
        extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()}
        extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()}
        assert extra_req_toml == extra_req_cfg
# Example taken from PEP 621 (https://peps.python.org/pep-0621/#example),
# with ``license-files`` updated to the PEP 639 form.
PEP621_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
description = "Lovely Spam! Wonderful Spam!"
readme = "README.rst"
requires-python = ">=3.8"
license-files = ["LICENSE.txt"] # Updated to be PEP 639 compliant
keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"]
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
maintainers = [
{name = "Brett Cannon", email = "brett@python.org"},
{name = "John X. Ãørçeč", email = "john@utf8.org"},
{name = "Γαμα קּ 東", email = "gama@utf8.org"},
]
classifiers = [
"Development Status :: 4 - Beta",
"Programming Language :: Python"
]
dependencies = [
"httpx",
"gidgethub[httpx]>4.0.0",
"django>2.1; os_name != 'nt'",
"django>2.0; os_name == 'nt'"
]
[project.optional-dependencies]
test = [
"pytest < 5.0.0",
"pytest-cov[all]"
]
[project.urls]
homepage = "http://example.com"
documentation = "http://readthedocs.org"
repository = "http://github.com"
changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md"
[project.scripts]
spam-cli = "spam:main_cli"
[project.gui-scripts]
spam-gui = "spam:main_gui"
[project.entry-points."spam.magical"]
tomatoes = "spam:main_tomatoes"
"""
# Variant whose maintainer entry uses an internationalised email address
# (not representable under RFC 5322).
PEP621_INTERNATIONAL_EMAIL_EXAMPLE = """\
[project]
name = "spam"
version = "2020.0.0"
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
maintainers = [
{name = "Степан Бандера", email = "криївка@оун-упа.укр"},
]
"""
# Minimal module providing the entry-point callables referenced above.
PEP621_EXAMPLE_SCRIPT = """
def main_cli(): pass
def main_gui(): pass
def main_tomatoes(): pass
"""
# Deprecated ``license = {text = ...}`` TOML-table form.
PEP639_LICENSE_TEXT = """\
[project]
name = "spam"
version = "2020.0.0"
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
license = {text = "MIT"}
"""
# PEP 639 SPDX license expression; deliberately lower-case so tests can
# check that setuptools normalises it in the produced metadata.
PEP639_LICENSE_EXPRESSION = """\
[project]
name = "spam"
version = "2020.0.0"
authors = [
{email = "hi@pradyunsg.me"},
{name = "Tzu-Ping Chung"}
]
license = "mit or apache-2.0" # should be normalized in metadata
classifiers = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
]
"""
def _pep621_example_project(
    tmp_path,
    readme="README.rst",
    pyproject_text=PEP621_EXAMPLE,
):
    """Materialise the PEP 621 example project under *tmp_path*.

    Writes ``pyproject.toml`` (with its readme reference retargeted to
    *readme*), the readme itself, a LICENSE stub, and ``spam.py``; returns
    the path to the written ``pyproject.toml``.
    """
    text = pyproject_text.replace('readme = "README.rst"', f'readme = "{readme}"')
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(text, encoding="utf-8")
    (tmp_path / readme).write_text("hello world", encoding="utf-8")
    (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---", encoding="utf-8")
    (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT, encoding="utf-8")
    return pyproject
def test_pep621_example(tmp_path):
    """Make sure the example in PEP 621 works"""
    project_file = _pep621_example_project(tmp_path)
    distribution = pyprojecttoml.apply_configuration(makedist(tmp_path), project_file)
    assert set(distribution.metadata.license_files) == {"LICENSE.txt"}
@pytest.mark.parametrize(
    ("readme", "ctype"),
    [
        ("Readme.txt", "text/plain"),
        ("readme.md", "text/markdown"),
        ("text.rst", "text/x-rst"),
    ],
)
def test_readme_content_type(tmp_path, readme, ctype):
    # The long-description content type is inferred from the readme extension.
    pyproject = _pep621_example_project(tmp_path, readme)
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.long_description_content_type == ctype
def test_undefined_content_type(tmp_path):
    """A readme with an unrecognised extension must be rejected."""
    pyproject = _pep621_example_project(tmp_path, "README.tex")
    expected = "Undefined content type for README.tex"
    with pytest.raises(ValueError, match=expected):
        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
def test_no_explicit_content_type_for_missing_extension(tmp_path):
    """An extension-less readme leaves the content type unset."""
    project_file = _pep621_example_project(tmp_path, "README")
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), project_file)
    assert dist.metadata.long_description_content_type is None
@pytest.mark.parametrize(
    ("pyproject_text", "expected_maintainers_meta_value"),
    (
        pytest.param(
            PEP621_EXAMPLE,
            (
                'Brett Cannon <brett@python.org>, "John X. Ãørçeč" <john@utf8.org>, '
                'Γαμα קּ 東 <gama@utf8.org>'
            ),
            id='non-international-emails',
        ),
        pytest.param(
            PEP621_INTERNATIONAL_EMAIL_EXAMPLE,
            'Степан Бандера <криївка@оун-упа.укр>',
            marks=pytest.mark.xfail(
                reason="CPython's `email.headerregistry.Address` only supports "
                'RFC 5322, as of Nov 10, 2022 and latest Python 3.11.0',
                strict=True,
            ),
            id='international-email',
        ),
    ),
)
def test_utf8_maintainer_in_metadata(  # issue-3663
    expected_maintainers_meta_value,
    pyproject_text,
    tmp_path,
):
    # Non-ASCII maintainer names/emails must survive the round trip into the
    # in-memory metadata AND the serialised PKG-INFO file.
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        pyproject_text=pyproject_text,
    )
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.maintainer_email == expected_maintainers_meta_value
    pkg_file = tmp_path / "PKG-FILE"
    with open(pkg_file, "w", encoding="utf-8") as fh:
        dist.metadata.write_pkg_file(fh)
    content = pkg_file.read_text(encoding="utf-8")
    assert f"Maintainer-email: {expected_maintainers_meta_value}" in content
@pytest.mark.parametrize(
    (
        'pyproject_text',
        'license',
        'license_expression',
        'content_str',
        'not_content_str',
    ),
    (
        pytest.param(
            PEP639_LICENSE_TEXT,
            'MIT',
            None,
            'License: MIT',
            'License-Expression: ',
            id='license-text',
            marks=[
                pytest.mark.filterwarnings(
                    "ignore:.project.license. as a TOML table is deprecated",
                )
            ],
        ),
        pytest.param(
            PEP639_LICENSE_EXPRESSION,
            None,
            'MIT OR Apache-2.0',
            'License-Expression: MIT OR Apache-2.0',
            'License: ',
            id='license-expression',
        ),
    ),
)
def test_license_in_metadata(
    license,
    license_expression,
    content_str,
    not_content_str,
    pyproject_text,
    tmp_path,
):
    # Exactly one of License / License-Expression may appear in PKG-INFO,
    # depending on whether the TOML used the legacy table or a PEP 639 string.
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        pyproject_text=pyproject_text,
    )
    dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert dist.metadata.license == license
    assert dist.metadata.license_expression == license_expression
    pkg_file = tmp_path / "PKG-FILE"
    with open(pkg_file, "w", encoding="utf-8") as fh:
        dist.metadata.write_pkg_file(fh)
    content = pkg_file.read_text(encoding="utf-8")
    # PEP 639 fields require core metadata version 2.4.
    assert "Metadata-Version: 2.4" in content
    assert content_str in content
    assert not_content_str not in content
def test_license_classifier_with_license_expression(tmp_path):
    # Drop the trailing classifier line of the example, then re-append an OSI
    # license classifier next to the SPDX expression: this combination is
    # invalid and must be rejected, naming the offending classifier.
    text = PEP639_LICENSE_EXPRESSION.rsplit("\n", 2)[0]
    pyproject = _pep621_example_project(
        tmp_path,
        "README",
        f"{text}\n \"License :: OSI Approved :: MIT License\"\n]",
    )
    msg = "License classifiers have been superseded by license expressions"
    with pytest.raises(InvalidConfigError, match=msg) as exc:
        pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    assert "License :: OSI Approved :: MIT License" in str(exc.value)
def test_license_classifier_without_license_expression(tmp_path):
    # A license classifier without an SPDX expression only warns (twice:
    # once for the classifier, once for the TOML-table license form) and
    # the classifier is preserved.
    text = """\
[project]
name = "spam"
version = "2020.0.0"
license = {text = "mit or apache-2.0"}
classifiers = ["License :: OSI Approved :: MIT License"]
"""
    pyproject = _pep621_example_project(tmp_path, "README", text)
    msg1 = "License classifiers are deprecated(?:.|\n)*MIT License"
    msg2 = ".project.license. as a TOML table is deprecated"
    with (
        pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
        pytest.warns(SetuptoolsDeprecationWarning, match=msg2),
    ):
        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
    # Check license classifier is still included
    assert dist.metadata.get_classifiers() == ["License :: OSI Approved :: MIT License"]
class TestLicenseFiles:
    """Interaction of ``license``/``license-files`` across the legacy
    ``tool.setuptools`` table and the PEP 639 ``project`` fields.
    """

    def base_pyproject(
        self,
        tmp_path,
        additional_text="",
        license_toml='license = {file = "LICENSE.txt"}\n',
    ):
        # Helper: start from PEP639_LICENSE_EXPRESSION, swap its license line
        # for *license_toml*, append *additional_text*, and write the project.
        text = PEP639_LICENSE_EXPRESSION
        # Sanity-check
        assert 'license = "mit or apache-2.0"' in text
        assert 'license-files' not in text
        assert "[tool.setuptools]" not in text
        text = re.sub(
            r"(license = .*)\n",
            license_toml,
            text,
            count=1,
        )
        assert license_toml in text  # sanity check
        text = f"{text}\n{additional_text}\n"
        pyproject = _pep621_example_project(tmp_path, "README", pyproject_text=text)
        return pyproject

    def base_pyproject_license_pep639(self, tmp_path, additional_text=""):
        # PEP 639 variant: LicenseRef SPDX expression plus license-files globs.
        return self.base_pyproject(
            tmp_path,
            additional_text=additional_text,
            license_toml='license = "licenseref-Proprietary"'
            '\nlicense-files = ["_FILE*"]\n',
        )

    def test_both_license_and_license_files_defined(self, tmp_path):
        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
        pyproject = self.base_pyproject(tmp_path, setuptools_config)
        (tmp_path / "_FILE.txt").touch()
        (tmp_path / "_FILE.rst").touch()
        # Would normally match the `license_files` patterns, but we want to exclude it
        # by being explicit. On the other hand, contents should be added to `license`
        license = tmp_path / "LICENSE.txt"
        license.write_text("LicenseRef-Proprietary\n", encoding="utf-8")
        msg1 = "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'"
        msg2 = ".project.license. as a TOML table is deprecated"
        with (
            pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
            pytest.warns(SetuptoolsDeprecationWarning, match=msg2),
        ):
            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
        assert dist.metadata.license == "LicenseRef-Proprietary\n"

    def test_both_license_and_license_files_defined_pep639(self, tmp_path):
        # Set license and license-files
        pyproject = self.base_pyproject_license_pep639(tmp_path)
        (tmp_path / "_FILE.txt").touch()
        (tmp_path / "_FILE.rst").touch()
        msg = "Normalizing.*LicenseRef"
        with pytest.warns(InformationOnly, match=msg):
            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        # PEP 639: expression is set, legacy ``license`` stays empty.
        assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"}
        assert dist.metadata.license is None
        assert dist.metadata.license_expression == "LicenseRef-Proprietary"

    def test_license_files_defined_twice(self, tmp_path):
        # Set project.license-files and tools.setuptools.license-files
        setuptools_config = '[tool.setuptools]\nlicense-files = ["_FILE*"]'
        pyproject = self.base_pyproject_license_pep639(tmp_path, setuptools_config)
        msg = "'project.license-files' is defined already. Remove 'tool.setuptools.license-files'"
        with pytest.raises(InvalidConfigError, match=msg):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)

    def test_default_patterns(self, tmp_path):
        setuptools_config = '[tool.setuptools]\nzip-safe = false'
        # ^ used just to trigger section validation
        pyproject = self.base_pyproject(tmp_path, setuptools_config, license_toml="")
        # Files matching setuptools' default license glob patterns.
        license_files = "LICENCE-a.html COPYING-abc.txt AUTHORS-xyz NOTICE,def".split()
        for fname in license_files:
            (tmp_path / fname).write_text(f"{fname}\n", encoding="utf-8")
        dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert (tmp_path / "LICENSE.txt").exists()  # from base example
        assert set(dist.metadata.license_files) == {*license_files, "LICENSE.txt"}

    def test_missing_patterns(self, tmp_path):
        pyproject = self.base_pyproject_license_pep639(tmp_path)
        assert list(tmp_path.glob("_FILE*")) == []  # sanity check
        msg1 = "Cannot find any files for the given pattern.*"
        msg2 = "Normalizing 'licenseref-Proprietary' to 'LicenseRef-Proprietary'"
        with (
            pytest.warns(SetuptoolsDeprecationWarning, match=msg1),
            pytest.warns(InformationOnly, match=msg2),
        ):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)

    def test_deprecated_file_expands_to_text(self, tmp_path):
        """Make sure the old example with ``license = {text = ...}`` works"""
        assert 'license-files = ["LICENSE.txt"]' in PEP621_EXAMPLE  # sanity check
        text = PEP621_EXAMPLE.replace(
            'license-files = ["LICENSE.txt"]',
            'license = {file = "LICENSE.txt"}',
        )
        pyproject = _pep621_example_project(tmp_path, pyproject_text=text)
        msg = ".project.license. as a TOML table is deprecated"
        with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
        assert dist.metadata.license == "--- LICENSE stub ---"
        assert set(dist.metadata.license_files) == {"LICENSE.txt"}  # auto-filled
class TestPyModules:
    # https://github.com/pypa/setuptools/issues/4316
    def dist(self, name):
        # Helper: build a Distribution configured with a single py-module.
        toml_config = f"""
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        py-modules = [{name!r}]
        """
        pyproject = Path("pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        return pyprojecttoml.apply_configuration(Distribution({}), pyproject)

    @pytest.mark.parametrize("module", ["pip-run", "abc-d.λ-xyz-e"])
    def test_valid_module_name(self, tmp_path, monkeypatch, module):
        # Dashes and non-ASCII characters are acceptable in module names.
        monkeypatch.chdir(tmp_path)
        assert module in self.dist(module).py_modules

    @pytest.mark.parametrize("module", ["pip run", "-pip-run", "pip-run-stubs"])
    def test_invalid_module_name(self, tmp_path, monkeypatch, module):
        # Spaces, leading dashes and "-stubs" suffixes must be rejected.
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="py-modules"):
            self.dist(module).py_modules
class TestExtModules:
    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
        # ``ext-modules`` in pyproject.toml should populate ``dist.ext_modules``.
        monkeypatch.chdir(tmp_path)
        pyproject = Path("pyproject.toml")
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        [tool.setuptools]
        ext-modules = [
          {name = "my.ext", sources = ["hello.c", "world.c"]}
        ]
        """
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        # ext-modules via pyproject.toml is still experimental, hence the warning.
        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "my.ext"
        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
class TestDeprecatedFields:
    def test_namespace_packages(self, tmp_path):
        # ``namespace-packages`` has been removed; configuring it must raise.
        pyproject = tmp_path / "pyproject.toml"
        config = """
        [project]
        name = "myproj"
        version = "42"
        [tool.setuptools]
        namespace-packages = ["myproj.pkg"]
        """
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(RemovedConfigError, match="namespace-packages"):
            pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject)
class TestPresetField:
    """Interaction between attributes pre-set on the ``Distribution`` (e.g. via
    ``setup.py`` keywords) and the ``project.dynamic`` list in pyproject.toml.
    """

    def pyproject(self, tmp_path, dynamic, extra_content=""):
        # Helper: minimal pyproject.toml declaring the given ``dynamic`` fields.
        content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n"
        if "version" not in dynamic:
            content += "version = '42'\n"
        file = tmp_path / "pyproject.toml"
        file.write_text(content + extra_content, encoding="utf-8")
        return file

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("license_expression", "license", "MIT"),
            pytest.param(
                *("license", "license", "Not SPDX"),
                marks=[pytest.mark.filterwarnings("ignore:.*license. overwritten")],
            ),
            ("classifiers", "classifiers", ["Private :: Classifier"]),
            ("entry_points", "scripts", {"console_scripts": ["foobar=foobar:main"]}),
            ("entry_points", "gui-scripts", {"gui_scripts": ["bazquux=bazquux:main"]}),
            pytest.param(
                *("install_requires", "dependencies", ["six"]),
                marks=[
                    pytest.mark.filterwarnings("ignore:.*install_requires. overwritten")
                ],
            ),
        ],
    )
    def test_not_listed_in_dynamic(self, tmp_path, attr, field, value):
        """Setuptools cannot set a field if not listed in ``dynamic``"""
        pyproject = self.pyproject(tmp_path, [])
        dist = makedist(tmp_path, **{attr: value})
        msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S)
        with pytest.warns(_MissingDynamic, match=msg):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        # The pre-set value must be discarded, not silently kept.
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert not dist_value

    @pytest.mark.parametrize(
        ("attr", "field", "value"),
        [
            ("license_expression", "license", "MIT"),
            ("install_requires", "dependencies", []),
            ("extras_require", "optional-dependencies", {}),
            ("install_requires", "dependencies", ["six"]),
            ("classifiers", "classifiers", ["Private :: Classifier"]),
        ],
    )
    def test_listed_in_dynamic(self, tmp_path, attr, field, value):
        # Fields declared in ``dynamic`` keep their pre-set values.
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, **{attr: value})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist)
        assert dist_value == value

    def test_license_files_exempt_from_dynamic(self, monkeypatch, tmp_path):
        """
        license-file is currently not considered in the context of dynamic.
        As per 2025-02-19, https://packaging.python.org/en/latest/specifications/pyproject-toml/#license-files
        allows setuptools to fill-in `license-files` the way it sees fit:
        > If the license-files key is not defined, tools can decide how to handle license files.
        > For example they can choose not to include any files or use their own
        > logic to discover the appropriate files in the distribution.
        Using license_files from setup.py to fill-in the value is in accordance
        with this rule.
        """
        monkeypatch.chdir(tmp_path)
        pyproject = self.pyproject(tmp_path, [])
        dist = makedist(tmp_path, license_files=["LIC*"])
        (tmp_path / "LIC1").write_text("42", encoding="utf-8")
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert dist.metadata.license_files == ["LIC1"]

    def test_warning_overwritten_dependencies(self, tmp_path):
        # Static TOML dependencies replace pre-set install_requires, warning.
        src = "[project]\nname='pkg'\nversion='0.1'\ndependencies=['click']\n"
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(src, encoding="utf-8")
        dist = makedist(tmp_path, install_requires=["wheel"])
        with pytest.warns(match="`install_requires` overwritten"):
            dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "wheel" not in dist.install_requires

    def test_optional_dependencies_dont_remove_env_markers(self, tmp_path):
        """
        Internally setuptools converts dependencies with markers to "extras".
        If ``install_requires`` is given by ``setup.py``, we have to ensure that
        applying ``optional-dependencies`` does not overwrite the mandatory
        dependencies with markers (see #3204).
        """
        # If setuptools replace its internal mechanism that uses `requires.txt`
        # this test has to be rewritten to adapt accordingly
        extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n"
        pyproject = self.pyproject(tmp_path, ["dependencies"], extra)
        install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"']
        dist = makedist(tmp_path, install_requires=install_req)
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert "foo" in dist.extras_require
        egg_info = dist.get_command_obj("egg_info")
        write_requirements(egg_info, tmp_path, tmp_path / "requires.txt")
        reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8")
        assert "importlib-resources" in reqs
        assert "bar" in reqs
        assert ':python_version < "3.7"' in reqs

    @pytest.mark.parametrize(
        ("field", "group"),
        [("scripts", "console_scripts"), ("gui-scripts", "gui_scripts")],
    )
    @pytest.mark.filterwarnings("error")
    def test_scripts_dont_require_dynamic_entry_points(self, tmp_path, field, group):
        # Issue 3862
        pyproject = self.pyproject(tmp_path, [field])
        dist = makedist(tmp_path, entry_points={group: ["foobar=foobar:main"]})
        dist = pyprojecttoml.apply_configuration(dist, pyproject)
        assert group in dist.entry_points
class TestMeta:
    def test_example_file_in_sdist(self, setuptools_sdist):
        """Meta test to ensure tests can run from sdist"""
        with tarfile.open(setuptools_sdist) as tar:
            assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames())
class TestInteropCommandLineParsing:
    def test_version(self, tmp_path, monkeypatch, capsys):
        # See pypa/setuptools#4047
        # This test can be removed once the CLI interface of setup.py is removed
        monkeypatch.chdir(tmp_path)
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        """
        pyproject = Path(tmp_path, "pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        opts = {"script_args": ["--version"]}
        dist = pyprojecttoml.apply_configuration(Distribution(opts), pyproject)
        dist.parse_command_line()  # <-- there should be no exception here.
        captured = capsys.readouterr()
        assert "42.0" in captured.out
class TestStaticConfig:
    def test_mark_static_fields(self, tmp_path, monkeypatch):
        # Values spelled out literally in pyproject.toml should be flagged as
        # "static" (i.e. safe to read without executing build code).
        monkeypatch.chdir(tmp_path)
        toml_config = """
        [project]
        name = "test"
        version = "42.0"
        dependencies = ["hello"]
        keywords = ["world"]
        classifiers = ["private :: hello world"]
        [tool.setuptools]
        obsoletes = ["abcd"]
        provides = ["abcd"]
        platforms = ["abcd"]
        """
        pyproject = Path(tmp_path, "pyproject.toml")
        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
        dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
        assert is_static(dist.install_requires)
        assert is_static(dist.metadata.keywords)
        assert is_static(dist.metadata.classifiers)
        assert is_static(dist.metadata.obsoletes)
        assert is_static(dist.metadata.provides)
        assert is_static(dist.metadata.platforms)
# --- Auxiliary Functions ---
def core_metadata(dist) -> str:
    """Render *dist*'s core metadata, validated and normalised for comparison."""
    with io.StringIO() as buffer:
        dist.metadata.write_pkg_file(buffer)
        pkg_file_txt = buffer.getvalue()
    # Make sure core metadata is valid
    Metadata.from_email(pkg_file_txt, validate=True)  # can raise exceptions
    # ---- DIFF NORMALISATION ----
    skip_prefixes: tuple[str, ...] = (
        # PEP 621 is very particular about author/maintainer metadata conversion
        "Author:",
        "Author-email:",
        "Maintainer:",
        "Maintainer-email:",
        # May be redundant with Home-page
        "Project-URL: Homepage,",
        "Home-page:",
        # May be missing in original (relying on default) but backfilled in the TOML
        "Description-Content-Type:",
    )
    # Drop skipped prefixes and blank lines; keep everything else verbatim.
    return "".join(
        line + "\n"
        for line in pkg_file_txt.splitlines()
        if line and not line.startswith(skip_prefixes)
    )

View File

@@ -0,0 +1,247 @@
import os
import sys
from pathlib import Path
import pytest
from setuptools._static import is_static
from setuptools.config import expand
from setuptools.discovery import find_package_path
from distutils.errors import DistutilsOptionError
def write_files(files, root_dir):
    """Create each ``relative-path -> text`` entry of *files* under *root_dir*."""
    for relpath, text in files.items():
        target = root_dir / relpath
        # Create intermediate directories on demand.
        target.parent.mkdir(exist_ok=True, parents=True)
        target.write_text(text, encoding="utf-8")
def test_glob_relative(tmp_path, monkeypatch):
    files = {
        "dir1/dir2/dir3/file1.txt",
        "dir1/dir2/file2.txt",
        "dir1/file3.txt",
        "a.ini",
        "b.ini",
        "dir1/c.ini",
        "dir1/dir2/a.ini",
    }
    write_files({k: "" for k in files}, tmp_path)
    # Mix of recursive globs and character classes covering all files above.
    patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"]
    monkeypatch.chdir(tmp_path)
    assert set(expand.glob_relative(patterns)) == files
    # Make sure the same APIs work outside cwd
    assert set(expand.glob_relative(patterns, tmp_path)) == files
def test_read_files(tmp_path, monkeypatch):
    dir_ = tmp_path / "dir_"
    (tmp_path / "_dir").mkdir(exist_ok=True)
    (tmp_path / "a.txt").touch()
    files = {"a.txt": "a", "dir1/b.txt": "b", "dir1/dir2/c.txt": "c"}
    write_files(files, dir_)
    # Sibling directory sharing the "dir_" prefix: must still be unreachable,
    # i.e. the containment check cannot be a naive string-prefix comparison.
    secrets = Path(str(dir_) + "secrets")
    secrets.mkdir(exist_ok=True)
    write_files({"secrets.txt": "secret keys"}, secrets)
    with monkeypatch.context() as m:
        m.chdir(dir_)
        assert expand.read_files(list(files)) == "a\nb\nc"
        cannot_access_msg = r"Cannot access '.*\.\..a\.txt'"
        with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
            expand.read_files(["../a.txt"])
        cannot_access_secrets_msg = r"Cannot access '.*secrets\.txt'"
        with pytest.raises(DistutilsOptionError, match=cannot_access_secrets_msg):
            expand.read_files(["../dir_secrets/secrets.txt"])
    # Make sure the same APIs work outside cwd
    assert expand.read_files(list(files), dir_) == "a\nb\nc"
    with pytest.raises(DistutilsOptionError, match=cannot_access_msg):
        expand.read_files(["../a.txt"], dir_)
class TestReadAttr:
    @pytest.mark.parametrize(
        "example",
        [
            # No cookie means UTF-8:
            b"__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
            # If a cookie is present, honor it:
            b"# -*- coding: utf-8 -*-\n__version__ = '\xc3\xa9'\nraise SystemExit(1)\n",
            b"# -*- coding: latin1 -*-\n__version__ = '\xe9'\nraise SystemExit(1)\n",
        ],
    )
    def test_read_attr_encoding_cookie(self, example, tmp_path):
        # ``raise SystemExit`` proves the module is parsed, never executed.
        (tmp_path / "mod.py").write_bytes(example)
        assert expand.read_attr('mod.__version__', root_dir=tmp_path) == 'é'
def test_read_attr(self, tmp_path, monkeypatch):
files = {
"pkg/__init__.py": "",
"pkg/sub/__init__.py": "VERSION = '0.1.1'",
"pkg/sub/mod.py": (
"VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\nraise SystemExit(1)"
),
}
write_files(files, tmp_path)
with monkeypatch.context() as m:
m.chdir(tmp_path)
# Make sure it can read the attr statically without evaluating the module
version = expand.read_attr('pkg.sub.VERSION')
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'})
assert version == '0.1.1'
assert is_static(values)
assert values['a'] == 0
assert values['b'] == {42}
assert is_static(values)
# Make sure the same APIs work outside cwd
assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1'
values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path)
assert values['c'] == (0, 1, 1)
@pytest.mark.parametrize(
"example",
[
"VERSION: str\nVERSION = '0.1.1'\nraise SystemExit(1)\n",
"VERSION: str = '0.1.1'\nraise SystemExit(1)\n",
],
)
def test_read_annotated_attr(self, tmp_path, example):
files = {
"pkg/__init__.py": "",
"pkg/sub/__init__.py": example,
}
write_files(files, tmp_path)
# Make sure this attribute can be read statically
version = expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path)
assert version == '0.1.1'
assert is_static(version)
@pytest.mark.parametrize(
"example",
[
"VERSION = (lambda: '0.1.1')()\n",
"def fn(): return '0.1.1'\nVERSION = fn()\n",
"VERSION: str = (lambda: '0.1.1')()\n",
],
)
def test_read_dynamic_attr(self, tmp_path, monkeypatch, example):
files = {
"pkg/__init__.py": "",
"pkg/sub/__init__.py": example,
}
write_files(files, tmp_path)
monkeypatch.chdir(tmp_path)
version = expand.read_attr('pkg.sub.VERSION')
assert version == '0.1.1'
assert not is_static(version)
def test_import_order(self, tmp_path):
"""
Sometimes the import machinery will import the parent package of a nested
module, which triggers side-effects and might create problems (see issue #3176)
``read_attr`` should bypass these limitations by resolving modules statically
(via ast.literal_eval).
"""
files = {
"src/pkg/__init__.py": "from .main import func\nfrom .about import version",
"src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42",
"src/pkg/about.py": "version = '42'",
}
write_files(files, tmp_path)
attr_desc = "pkg.about.version"
package_dir = {"": "src"}
# `import super_complicated_dep` should not run, otherwise the build fails
assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42"
@pytest.mark.parametrize(
    ("package_dir", "file", "module", "return_value"),
    [
        ({"": "src"}, "src/pkg/main.py", "pkg.main", 42),
        ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13),
        ({}, "single_module.py", "single_module", 70),
        ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836),
    ],
)
def test_resolve_class(monkeypatch, tmp_path, package_dir, file, module, return_value):
    """``resolve_class`` should import a class from several project layouts
    (src-layout, custom package-dir, single module, flat layout) given the
    mapping in *package_dir*.
    """
    monkeypatch.setattr(sys, "modules", {})  # reproducibility
    files = {file: f"class Custom:\n    def testing(self): return {return_value}"}
    write_files(files, tmp_path)
    cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path)
    assert cls().testing() == return_value
@pytest.mark.parametrize(
    ("args", "pkgs"),
    [
        ({"where": ["."], "namespaces": False}, {"pkg", "other"}),
        ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}),
        ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}),
        ({}, {"pkg", "other", "dir1", "dir1.dir2"}),  # default value for `namespaces`
    ],
)
def test_find_packages(tmp_path, args, pkgs):
    """``find_packages`` should discover the expected packages and fill in
    ``package_dir``, both relative to ``root_dir`` and via absolute ``where``
    paths (the "outside cwd" case).
    """
    files = {
        "pkg/__init__.py",
        "other/__init__.py",
        "dir1/dir2/__init__.py",
    }
    write_files({k: "" for k in files}, tmp_path)

    package_dir = {}
    kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args}
    # (removed a dead ``where = kwargs.get(...)`` assignment that was never
    # read before being rebound below)
    assert set(expand.find_packages(**kwargs)) == pkgs
    for pkg in pkgs:
        pkg_path = find_package_path(pkg, package_dir, tmp_path)
        assert os.path.exists(pkg_path)

    # Make sure the same APIs work outside cwd
    where = [
        str((tmp_path / p).resolve()).replace(os.sep, "/")  # ensure posix-style paths
        for p in args.pop("where", ["."])
    ]
    assert set(expand.find_packages(where=where, **args)) == pkgs
@pytest.mark.parametrize(
    ("files", "where", "expected_package_dir"),
    [
        (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}),
        (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}),
        (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}),
        (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}),
        (
            ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"],
            ["src1", "src2"],
            {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"},
        ),
        (
            ["src/pkg1/__init__.py", "pkg2/__init__.py"],
            ["src", "."],
            {"pkg1": "src/pkg1"},
        ),
    ],
)
def test_fill_package_dir(tmp_path, files, where, expected_package_dir):
    """``find_packages(fill_package_dir=...)`` should populate the mapping with
    the discovered package locations.
    """
    write_files(dict.fromkeys(files, ""), tmp_path)
    discovered = {}
    pkgs = expand.find_packages(
        where=where, root_dir=tmp_path, fill_package_dir=discovered, namespaces=False
    )
    assert set(discovered.items()) == set(expected_package_dir.items())
    for pkg in pkgs:
        assert os.path.exists(find_package_path(pkg, discovered, tmp_path))

View File

@@ -0,0 +1,396 @@
import re
from configparser import ConfigParser
from inspect import cleandoc
import jaraco.path
import pytest
import tomli_w
from path import Path
import setuptools # noqa: F401 # force distutils.core to be patched
from setuptools.config.pyprojecttoml import (
_ToolsTypoInMetadata,
apply_configuration,
expand_configuration,
read_configuration,
validate,
)
from setuptools.dist import Distribution
from setuptools.errors import OptionError
import distutils.core
EXAMPLE = """
[project]
name = "myproj"
keywords = ["some", "key", "words"]
dynamic = ["version", "readme"]
requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
dependencies = [
'importlib-metadata>=0.12;python_version<"3.8"',
'importlib-resources>=1.0;python_version<"3.7"',
'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"',
]
[project.optional-dependencies]
docs = [
"sphinx>=3",
"sphinx-argparse>=0.2.5",
"sphinx-rtd-theme>=0.4.3",
]
testing = [
"pytest>=1",
"coverage>=3,<5",
]
[project.scripts]
exec = "pkg.__main__:exec"
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[tool.setuptools]
package-dir = {"" = "src"}
zip-safe = true
platforms = ["any"]
[tool.setuptools.packages.find]
where = ["src"]
[tool.setuptools.cmdclass]
sdist = "pkg.mod.CustomSdist"
[tool.setuptools.dynamic.version]
attr = "pkg.__version__.VERSION"
[tool.setuptools.dynamic.readme]
file = ["README.md"]
content-type = "text/markdown"
[tool.setuptools.package-data]
"*" = ["*.txt"]
[tool.setuptools.data-files]
"data" = ["_files/*.txt"]
[tool.distutils.sdist]
formats = "gztar"
[tool.distutils.bdist_wheel]
universal = true
"""
def create_example(path, pkg_root):
    """Materialize an example project (driven by ``EXAMPLE``) under *path*,
    placing the ``pkg`` package inside *pkg_root* (use ``"."`` for flat layout).
    """
    files = {
        "pyproject.toml": EXAMPLE,
        "README.md": "hello world",
        "_files": {
            "file.txt": "",
        },
    }
    packages = {
        "pkg": {
            "__init__.py": "",
            "mod.py": "class CustomSdist: pass",
            "__version__.py": "VERSION = (3, 10)",
            "__main__.py": "def exec(): print('hello')",
        },
    }

    assert pkg_root  # Meta-test: cannot be empty string.

    if pkg_root == ".":
        files = {**files, **packages}
        # skip other files: flat-layout will raise error for multi-package dist
    else:
        # Use this opportunity to ensure namespaces are discovered
        files[pkg_root] = {**packages, "other": {"nested": {"__init__.py": ""}}}

    jaraco.path.build(files, prefix=path)
def verify_example(config, path, pkg_root):
    """Round-trip *config* through ``tomli_w``/``expand_configuration`` and
    check the expanded values for the project created by ``create_example``.
    """
    pyproject = path / "pyproject.toml"
    pyproject.write_text(tomli_w.dumps(config), encoding="utf-8")
    expanded = expand_configuration(config, path)
    expanded_project = expanded["project"]
    assert read_configuration(pyproject, expand=True) == expanded
    assert expanded_project["version"] == "3.10"
    assert expanded_project["readme"]["text"] == "hello world"
    assert "packages" in expanded["tool"]["setuptools"]
    if pkg_root == ".":
        # Auto-discovery will raise error for multi-package dist
        assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"}
    else:
        assert set(expanded["tool"]["setuptools"]["packages"]) == {
            "pkg",
            "other",
            "other.nested",
        }
    assert expanded["tool"]["setuptools"]["include-package-data"] is True
    # The ``"*"`` key in ``package-data`` is normalized to ``""`` (all packages):
    assert "" in expanded["tool"]["setuptools"]["package-data"]
    assert "*" not in expanded["tool"]["setuptools"]["package-data"]
    assert expanded["tool"]["setuptools"]["data-files"] == [
        ("data", ["_files/file.txt"])
    ]
def test_read_configuration(tmp_path):
    """Reading without expansion should leave dynamic fields unset."""
    pkg_root = "src"
    create_example(tmp_path, pkg_root)
    config = read_configuration(tmp_path / "pyproject.toml", expand=False)
    project = config["project"]
    assert project.get("version") is None
    assert project.get("readme") is None
    verify_example(config, tmp_path, pkg_root)
@pytest.mark.parametrize(
    ("pkg_root", "opts"),
    [
        (".", {}),
        ("src", {}),
        ("lib", {"packages": {"find": {"where": ["lib"]}}}),
    ],
)
def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts):
    """Even after removing ``packages``/``package-dir`` from the configuration,
    auto-discovery should locate the package so ``attr:`` directives still work.
    """
    create_example(tmp_path, pkg_root)

    pyproject = tmp_path / "pyproject.toml"

    config = read_configuration(pyproject, expand=False)
    assert config["project"].get("version") is None
    assert config["project"].get("readme") is None

    # Simulate a config that relies purely on auto-discovery:
    config["tool"]["setuptools"].pop("packages", None)
    config["tool"]["setuptools"].pop("package-dir", None)

    config["tool"]["setuptools"].update(opts)
    verify_example(config, tmp_path, pkg_root)
# Entry points in the ``configparser`` layout written to ``entry-points.txt``
# (section name -> {entry point name: target}).
ENTRY_POINTS = {
    "console_scripts": {"a": "mod.a:func"},
    "gui_scripts": {"b": "mod.b:func"},
    "other": {"c": "mod.c:func [extra]"},
}
class TestEntryPoints:
    """Dynamic ``entry-points`` loaded from a file via ``tool.setuptools.dynamic``."""

    def write_entry_points(self, tmp_path):
        # Serialize ENTRY_POINTS in the INI format setuptools expects.
        entry_points = ConfigParser()
        entry_points.read_dict(ENTRY_POINTS)
        with open(tmp_path / "entry-points.txt", "w", encoding="utf-8") as f:
            entry_points.write(f)

    def pyproject(self, dynamic=None):
        # Minimal config marking scripts/gui-scripts/entry-points as dynamic.
        project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]}
        tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}}
        return {"project": project, "tool": {"setuptools": tool}}

    def test_all_listed_in_dynamic(self, tmp_path):
        self.write_entry_points(tmp_path)
        expanded = expand_configuration(self.pyproject(), tmp_path)
        expanded_project = expanded["project"]
        assert len(expanded_project["scripts"]) == 1
        assert expanded_project["scripts"]["a"] == "mod.a:func"
        assert len(expanded_project["gui-scripts"]) == 1
        assert expanded_project["gui-scripts"]["b"] == "mod.b:func"
        assert len(expanded_project["entry-points"]) == 1
        assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]"

    @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts"))
    def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic):
        # Omitting one of the script groups from ``dynamic`` must be an error,
        # because the entry-points file still defines it.
        self.write_entry_points(tmp_path)
        dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic}

        msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}"
        with pytest.raises(OptionError, match=re.compile(msg, re.S)):
            expand_configuration(self.pyproject(dynamic), tmp_path)
class TestClassifiers:
    """Handling of the ``classifiers`` field, especially dynamic loading from a file."""

    def test_dynamic(self, tmp_path):
        # Let's create a project example that has dynamic classifiers
        # coming from a txt file.
        create_example(tmp_path, "src")
        classifiers = cleandoc(
            """
            Framework :: Flask
            Programming Language :: Haskell
            """
        )
        (tmp_path / "classifiers.txt").write_text(classifiers, encoding="utf-8")

        pyproject = tmp_path / "pyproject.toml"
        config = read_configuration(pyproject, expand=False)
        dynamic = config["project"]["dynamic"]
        config["project"]["dynamic"] = list({*dynamic, "classifiers"})
        dynamic_config = config["tool"]["setuptools"]["dynamic"]
        dynamic_config["classifiers"] = {"file": "classifiers.txt"}

        # When the configuration is expanded,
        # each line of the file should be an different classifier.
        validate(config, pyproject)
        expanded = expand_configuration(config, tmp_path)

        assert set(expanded["project"]["classifiers"]) == {
            "Framework :: Flask",
            "Programming Language :: Haskell",
        }

    def test_dynamic_without_config(self, tmp_path):
        # ``classifiers`` marked dynamic, but no matching entry under
        # ``tool.setuptools.dynamic`` -> error.
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["classifiers"]
        """

        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.raises(OptionError, match="No configuration .* .classifiers."):
            read_configuration(pyproject)

    def test_dynamic_readme_from_setup_script_args(self, tmp_path):
        # A dynamic ``readme`` can be supplied via setup() keyword arguments.
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["readme"]
        """
        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        dist = Distribution(attrs={"long_description": "42"})
        # No error should occur because of missing `readme`
        dist = apply_configuration(dist, pyproject)
        assert dist.metadata.long_description == "42"

    def test_dynamic_without_file(self, tmp_path):
        # A missing file is only a warning; the field is simply skipped.
        config = """
        [project]
        name = "myproj"
        version = '42'
        dynamic = ["classifiers"]

        [tool.setuptools.dynamic]
        classifiers = {file = ["classifiers.txt"]}
        """

        pyproject = tmp_path / "pyproject.toml"
        pyproject.write_text(cleandoc(config), encoding="utf-8")
        with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
            expanded = read_configuration(pyproject)
        assert "classifiers" not in expanded["project"]
@pytest.mark.parametrize(
    "example",
    (
        """
        [project]
        name = "myproj"
        version = "1.2"

        [my-tool.that-disrespect.pep518]
        value = 42
        """,
    ),
)
def test_ignore_unrelated_config(tmp_path, example):
    """Unknown third-party tables in ``pyproject.toml`` must not break parsing."""
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(cleandoc(example), encoding="utf-8")

    # Make sure no error is raised due to 3rd party configs in pyproject.toml
    assert read_configuration(pyproject) is not None
@pytest.mark.parametrize(
    ("example", "error_msg"),
    [
        (
            """
            [project]
            name = "myproj"
            version = "1.2"
            requires = ['pywin32; platform_system=="Windows"' ]
            """,
            "configuration error: .project. must not contain ..requires.. properties",
        ),
    ],
)
def test_invalid_example(tmp_path, example, error_msg):
    """Schema violations should surface as a ValueError with a helpful message."""
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(cleandoc(example), encoding="utf-8")

    pattern = re.compile(f"invalid pyproject.toml.*{error_msg}.*", re.M | re.S)
    with pytest.raises(ValueError, match=pattern):
        read_configuration(pyproject)
@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
def test_empty(tmp_path, config):
    """Files without any relevant section should parse to an empty dict."""
    pyproject_file = tmp_path / "pyproject.toml"
    pyproject_file.write_text(config, encoding="utf-8")

    # Make sure no error is raised
    assert read_configuration(pyproject_file) == {}
@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
def test_include_package_data_by_default(tmp_path, config):
    """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as
    default.
    """
    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(config, encoding="utf-8")

    # Read back (avoid rebinding the ``config`` parameter, unlike the original):
    parsed = read_configuration(pyproject)
    assert parsed["tool"]["setuptools"]["include-package-data"] is True
def test_include_package_data_in_setuppy(tmp_path):
    """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in
    ``setup.py``.

    See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889
    """
    files = {
        "pyproject.toml": "[project]\nname = 'myproj'\nversion='42'\n",
        "setup.py": "__import__('setuptools').setup(include_package_data=False)",
    }
    jaraco.path.build(files, prefix=tmp_path)

    # ``path.Path`` works as a chdir context manager here:
    with Path(tmp_path):
        dist = distutils.core.run_setup("setup.py", {}, stop_after="config")

    assert dist.get_name() == "myproj"
    assert dist.get_version() == "42"
    # The explicit setup.py value must win over the pyproject.toml default (True):
    assert dist.include_package_data is False
def test_warn_tools_typo(tmp_path):
    """Test that the common ``tools.setuptools`` typo in ``pyproject.toml`` issues a warning

    See https://github.com/pypa/setuptools/issues/4150
    """
    config = """
    [build-system]
    requires = ["setuptools"]
    build-backend = "setuptools.build_meta"

    [project]
    name = "myproj"
    version = '42'

    [tools.setuptools]
    packages = ["package"]
    """

    pyproject = tmp_path / "pyproject.toml"
    pyproject.write_text(cleandoc(config), encoding="utf-8")

    with pytest.warns(_ToolsTypoInMetadata):
        read_configuration(pyproject)

View File

@@ -0,0 +1,109 @@
from inspect import cleandoc
import pytest
from jaraco import path
from setuptools.config.pyprojecttoml import apply_configuration
from setuptools.dist import Distribution
from setuptools.warnings import SetuptoolsWarning
def test_dynamic_dependencies(tmp_path):
    """``dependencies`` marked dynamic should be read from a requirements file
    (comment lines stripped).
    """
    files = {
        "requirements.txt": "six\n  # comment\n",
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "myproj"
            version = "1.0"
            dynamic = ["dependencies"]

            [build-system]
            requires = ["setuptools", "wheel"]
            build-backend = "setuptools.build_meta"

            [tool.setuptools.dynamic.dependencies]
            file = ["requirements.txt"]
            """
        ),
    }
    path.build(files, prefix=tmp_path)
    dist = Distribution()
    dist = apply_configuration(dist, tmp_path / "pyproject.toml")
    assert dist.install_requires == ["six"]
def test_dynamic_optional_dependencies(tmp_path):
    """Dynamic ``optional-dependencies`` groups should be read from their files."""
    files = {
        "requirements-docs.txt": "sphinx\n  # comment\n",
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "myproj"
            version = "1.0"
            dynamic = ["optional-dependencies"]

            [tool.setuptools.dynamic.optional-dependencies.docs]
            file = ["requirements-docs.txt"]

            [build-system]
            requires = ["setuptools", "wheel"]
            build-backend = "setuptools.build_meta"
            """
        ),
    }
    path.build(files, prefix=tmp_path)
    dist = Distribution()
    dist = apply_configuration(dist, tmp_path / "pyproject.toml")
    assert dist.extras_require == {"docs": ["sphinx"]}
def test_mixed_dynamic_optional_dependencies(tmp_path):
    """
    Test that if PEP 621 was loosened to allow mixing of dynamic and static
    configurations in the case of fields containing sub-fields (groups),
    things would work out.
    """
    files = {
        "requirements-images.txt": "pillow~=42.0\n  # comment\n",
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "myproj"
            version = "1.0"
            dynamic = ["optional-dependencies"]

            [project.optional-dependencies]
            docs = ["sphinx"]

            [tool.setuptools.dynamic.optional-dependencies.images]
            file = ["requirements-images.txt"]
            """
        ),
    }
    path.build(files, prefix=tmp_path)
    pyproject = tmp_path / "pyproject.toml"
    # Currently mixing a static group with the dynamic marker is rejected:
    with pytest.raises(ValueError, match="project.optional-dependencies"):
        apply_configuration(Distribution(), pyproject)
def test_mixed_extras_require_optional_dependencies(tmp_path):
    """``extras_require`` passed via setup-script attrs is overwritten (with a
    warning) by ``project.optional-dependencies``.
    """
    files = {
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "myproj"
            version = "1.0"
            optional-dependencies.docs = ["sphinx"]
            """
        ),
    }
    path.build(files, prefix=tmp_path)
    pyproject = tmp_path / "pyproject.toml"

    with pytest.warns(SetuptoolsWarning, match=".extras_require. overwritten"):
        dist = Distribution({"extras_require": {"hello": ["world"]}})
        dist = apply_configuration(dist, pyproject)
        assert dist.extras_require == {"docs": ["sphinx"]}

View File

@@ -0,0 +1,980 @@
import configparser
import contextlib
import inspect
import re
from pathlib import Path
from unittest.mock import Mock, patch
import pytest
from packaging.requirements import InvalidRequirement
from setuptools.config.setupcfg import ConfigHandler, Target, read_configuration
from setuptools.dist import Distribution, _Distribution
from setuptools.warnings import SetuptoolsDeprecationWarning
from ..textwrap import DALS
from distutils.errors import DistutilsFileError, DistutilsOptionError
class ErrConfigHandler(ConfigHandler[Target]):
    """Erroneous handler. Fails to implement required methods."""

    # The prefix value has no special meaning; it only needs to be unique.
    section_prefix = "**err**"
def make_package_dir(name, base_dir, ns=False):
    """Create the nested directories of *name* (``/``-separated) under
    *base_dir* and, unless *ns* (namespace package), an empty ``__init__.py``.

    Returns ``(package_dir, init_file)``; ``init_file`` is ``None`` for
    namespace packages.
    """
    current = base_dir
    for part in name.split('/'):
        current = current.mkdir(part)
    if ns:
        return current, None
    init_file = current.join('__init__.py')
    init_file.write('')
    return current, init_file
def fake_env(
    tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'
):
    """Create a minimal project inside *tmpdir*: a ``setup.py``, a ``setup.cfg``
    (written as bytes using *encoding*) and a package at *package_path* whose
    ``__init__.py`` defines VERSION / VERSION_MAJOR / get_version() for the
    ``attr:`` directive tests.

    Returns ``(package_dir, config)`` py.path objects.
    """
    if setup_py is None:
        setup_py = 'from setuptools import setup\nsetup()\n'

    tmpdir.join('setup.py').write(setup_py)
    config = tmpdir.join('setup.cfg')
    config.write(setup_cfg.encode(encoding), mode='wb')

    package_dir, init_file = make_package_dir(package_path, tmpdir)

    init_file.write(
        'VERSION = (1, 2, 3)\n'
        '\n'
        'VERSION_MAJOR = 1'
        '\n'
        'def get_version():\n'
        '    return [3, 4, 5, "dev"]\n'
        '\n'
    )

    return package_dir, config
@contextlib.contextmanager
def get_dist(tmpdir, kwargs_initial=None, parse=True):
    """Yield a ``Distribution`` rooted in *tmpdir* (chdir'ing into it for the
    duration of the context), optionally parsing the config files first.
    """
    kwargs_initial = kwargs_initial or {}

    with tmpdir.as_cwd():
        dist = Distribution(kwargs_initial)
        dist.script_name = 'setup.py'
        # Explicit ``if`` instead of the original ``parse and dist.parse_config_files()``
        # truthiness trick (expression used purely for its side effect).
        if parse:
            dist.parse_config_files()

        yield dist
def test_parsers_implemented():
    """Accessing ``parsers`` on a handler that does not implement it must raise."""
    with pytest.raises(NotImplementedError):
        handler = ErrConfigHandler(None, {}, False, Mock())
        handler.parsers
class TestConfigurationReader:
    """Behavior of ``setupcfg.read_configuration``."""

    def test_basic(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'keywords = one, two\n'
            '\n'
            '[options]\n'
            'scripts = bin/a.py, bin/b.py\n',
        )
        config_dict = read_configuration(str(config))
        assert config_dict['metadata']['version'] == '10.1.1'
        assert config_dict['metadata']['keywords'] == ['one', 'two']
        assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py']

    def test_no_config(self, tmpdir):
        # A missing setup.cfg is a hard error.
        with pytest.raises(DistutilsFileError):
            read_configuration(str(tmpdir.join('setup.cfg')))

    def test_ignore_errors(self, tmpdir):
        _, config = fake_env(
            tmpdir,
            '[metadata]\nversion = attr: none.VERSION\nkeywords = one, two\n',
        )
        with pytest.raises(ImportError):
            read_configuration(str(config))

        # With ignore_option_errors the offending option is simply dropped:
        config_dict = read_configuration(str(config), ignore_option_errors=True)

        assert config_dict['metadata']['keywords'] == ['one', 'two']
        assert 'version' not in config_dict['metadata']

        config.remove()
class TestMetadata:
    """Parsing of the ``[metadata]`` section of ``setup.cfg``."""

    def test_basic(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = 10.1.1\n'
            'description = Some description\n'
            'long_description_content_type = text/something\n'
            'long_description = file: README\n'
            'name = fake_name\n'
            'keywords = one, two\n'
            'provides = package, package.sub\n'
            'license = otherlic\n'
            'download_url = http://test.test.com/test/\n'
            'maintainer_email = test@test.com\n',
        )

        tmpdir.join('README').write('readme contents\nline2')

        meta_initial = {
            # This will be used so `otherlic` won't replace it.
            'license': 'BSD 3-Clause License',
        }

        with get_dist(tmpdir, meta_initial) as dist:
            metadata = dist.metadata

            assert metadata.version == '10.1.1'
            assert metadata.description == 'Some description'
            assert metadata.long_description_content_type == 'text/something'
            assert metadata.long_description == 'readme contents\nline2'
            assert metadata.provides == ['package', 'package.sub']
            assert metadata.license == 'BSD 3-Clause License'
            assert metadata.name == 'fake_name'
            assert metadata.keywords == ['one', 'two']
            assert metadata.download_url == 'http://test.test.com/test/'
            assert metadata.maintainer_email == 'test@test.com'

    def test_license_cfg(self, tmpdir):
        fake_env(
            tmpdir,
            DALS(
                """
                [metadata]
                name=foo
                version=0.0.1
                license=Apache 2.0
                """
            ),
        )

        with get_dist(tmpdir) as dist:
            metadata = dist.metadata

            assert metadata.name == "foo"
            assert metadata.version == "0.0.1"
            assert metadata.license == "Apache 2.0"

    def test_file_mixed(self, tmpdir):
        # Multiple ``file:`` targets are concatenated in order.
        fake_env(
            tmpdir,
            '[metadata]\nlong_description = file: README.rst, CHANGES.rst\n\n',
        )

        tmpdir.join('README.rst').write('readme contents\nline2')
        tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff')

        with get_dist(tmpdir) as dist:
            assert dist.metadata.long_description == (
                'readme contents\nline2\nchangelog contents\nand stuff'
            )

    def test_file_sandboxed(self, tmpdir):
        # ``file:`` directives must not escape the project root.
        tmpdir.ensure("README")
        project = tmpdir.join('depth1', 'depth2')
        project.ensure(dir=True)
        fake_env(project, '[metadata]\nlong_description = file: ../../README\n')

        with get_dist(project, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()  # file: out of sandbox

    def test_aliases(self, tmpdir):
        # Legacy aliases (home_page, summary, ...) map onto canonical fields.
        fake_env(
            tmpdir,
            '[metadata]\n'
            'author_email = test@test.com\n'
            'home_page = http://test.test.com/test/\n'
            'summary = Short summary\n'
            'platform = a, b\n'
            'classifier =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )

        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.author_email == 'test@test.com'
            assert metadata.url == 'http://test.test.com/test/'
            assert metadata.description == 'Short summary'
            assert metadata.platforms == ['a', 'b']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_multiline(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'name = fake_name\n'
            'keywords =\n'
            '  one\n'
            '  two\n'
            'classifiers =\n'
            '  Framework :: Django\n'
            '  Programming Language :: Python :: 3.5\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.keywords == ['one', 'two']
            assert metadata.classifiers == [
                'Framework :: Django',
                'Programming Language :: Python :: 3.5',
            ]

    def test_dict(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'project_urls =\n'
            '  Link One = https://example.com/one/\n'
            '  Link Two = https://example.com/two/\n',
        )
        with get_dist(tmpdir) as dist:
            metadata = dist.metadata
            assert metadata.project_urls == {
                'Link One': 'https://example.com/one/',
                'Link Two': 'https://example.com/two/',
            }

    def test_version(self, tmpdir):
        # ``attr:`` resolution: plain value, callable, scalar, and sub-packages
        # (including one whose import would fail -> must be read statically).
        package_dir, config = fake_env(
            tmpdir, '[metadata]\nversion = attr: fake_package.VERSION\n'
        )

        sub_a = package_dir.mkdir('subpkg_a')
        sub_a.join('__init__.py').write('')
        sub_a.join('mod.py').write('VERSION = (2016, 11, 26)')

        sub_b = package_dir.mkdir('subpkg_b')
        sub_b.join('__init__.py').write('')
        sub_b.join('mod.py').write(
            'import third_party_module\nVERSION = (2016, 11, 26)'
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

        config.write('[metadata]\nversion = attr: fake_package.get_version\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '3.4.5.dev'

        config.write('[metadata]\nversion = attr: fake_package.VERSION_MAJOR\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1'

        config.write('[metadata]\nversion = attr: fake_package.subpkg_a.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'

        config.write('[metadata]\nversion = attr: fake_package.subpkg_b.mod.VERSION\n')
        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '2016.11.26'

    def test_version_file(self, tmpdir):
        fake_env(tmpdir, '[metadata]\nversion = file: fake_package/version.txt\n')
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n')

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

        # A version file with more than one line is ambiguous -> error.
        tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
        with pytest.raises(DistutilsOptionError):
            with get_dist(tmpdir) as dist:
                dist.metadata.version

    def test_version_with_package_dir_simple(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_simple.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    = src\n',
            package_path='src/fake_package_simple',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_rename(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_rename.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_rename = fake_dir\n',
            package_path='fake_dir',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_version_with_package_dir_complex(self, tmpdir):
        fake_env(
            tmpdir,
            '[metadata]\n'
            'version = attr: fake_package_complex.VERSION\n'
            '[options]\n'
            'package_dir =\n'
            '    fake_package_complex = src/fake_dir\n',
            package_path='src/fake_dir',
        )

        with get_dist(tmpdir) as dist:
            assert dist.metadata.version == '1.2.3'

    def test_unknown_meta_item(self, tmpdir):
        fake_env(tmpdir, '[metadata]\nname = fake_name\nunknown = some\n')
        with get_dist(tmpdir, parse=False) as dist:
            dist.parse_config_files()  # Skip unknown.

    def test_usupported_section(self, tmpdir):
        # (sic: historical typo in the test name is kept on purpose)
        fake_env(tmpdir, '[metadata.some]\nkey = val\n')
        with get_dist(tmpdir, parse=False) as dist:
            with pytest.raises(DistutilsOptionError):
                dist.parse_config_files()

    def test_classifiers(self, tmpdir):
        # Set literal instead of ``set([...])`` (flake8-comprehensions C405).
        expected = {
            'Framework :: Django',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.5',
        }

        # From file.
        _, config = fake_env(tmpdir, '[metadata]\nclassifiers = file: classifiers\n')

        tmpdir.join('classifiers').write(
            'Framework :: Django\n'
            'Programming Language :: Python :: 3\n'
            'Programming Language :: Python :: 3.5\n'
        )

        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

        # From list notation
        config.write(
            '[metadata]\n'
            'classifiers =\n'
            '    Framework :: Django\n'
            '    Programming Language :: Python :: 3\n'
            '    Programming Language :: Python :: 3.5\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.metadata.classifiers) == expected

    def test_interpolation(self, tmpdir):
        # Raw ``%`` interpolation placeholders are not supported.
        fake_env(tmpdir, '[metadata]\ndescription = %(message)s\n')
        with pytest.raises(configparser.InterpolationMissingOptionError):
            with get_dist(tmpdir):
                pass

    def test_non_ascii_1(self, tmpdir):
        fake_env(tmpdir, '[metadata]\ndescription = éàïôñ\n', encoding='utf-8')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_3(self, tmpdir):
        # An invalid coding cookie is ignored (files are always read as UTF-8).
        fake_env(tmpdir, '\n# -*- coding: invalid\n')
        with get_dist(tmpdir):
            pass

    def test_non_ascii_4(self, tmpdir):
        fake_env(
            tmpdir,
            '# -*- coding: utf-8\n[metadata]\ndescription = éàïôñ\n',
            encoding='utf-8',
        )
        with get_dist(tmpdir) as dist:
            assert dist.metadata.description == 'éàïôñ'

    def test_not_utf8(self, tmpdir):
        """
        Config files encoded not in UTF-8 will fail
        """
        fake_env(
            tmpdir,
            '# vim: set fileencoding=iso-8859-15 :\n[metadata]\ndescription = éàïôñ\n',
            encoding='iso-8859-15',
        )
        with pytest.raises(UnicodeDecodeError):
            with get_dist(tmpdir):
                pass

    @pytest.mark.parametrize(
        ("error_msg", "config", "invalid"),
        [
            (
                "Invalid dash-separated key 'author-email' in 'metadata' (setup.cfg)",
                DALS(
                    """
                    [metadata]
                    author-email = test@test.com
                    maintainer_email = foo@foo.com
                    """
                ),
                {"author-email": "test@test.com"},
            ),
            (
                "Invalid uppercase key 'Name' in 'metadata' (setup.cfg)",
                DALS(
                    """
                    [metadata]
                    Name = foo
                    description = Some description
                    """
                ),
                {"Name": "foo"},
            ),
        ],
    )
    def test_invalid_options_previously_deprecated(
        self, tmpdir, error_msg, config, invalid
    ):
        # This test and related methods can be removed when no longer needed.
        # Deprecation postponed due to push-back from the community in
        # https://github.com/pypa/setuptools/issues/4910
        fake_env(tmpdir, config)
        # FIX: use the context manager properly instead of a bare ``__enter__()``
        # call, so the cwd change done by ``tmpdir.as_cwd()`` inside ``get_dist``
        # is restored (the old code never ran the cleanup and leaked the chdir).
        # Parsing (which emits the warning) happens on context entry, so the
        # warning is still captured by ``pytest.warns``.
        with pytest.warns(SetuptoolsDeprecationWarning, match=re.escape(error_msg)):
            with get_dist(tmpdir) as dist:
                pass

        tmpdir.join('setup.cfg').remove()

        for field, value in invalid.items():
            attr = field.replace("-", "_").lower()
            assert getattr(dist.metadata, attr) == value
class TestOptions:
    def test_basic(self, tmpdir):
        """Scalar, list and dict ``[options]`` keys parse into dist attributes."""
        fake_env(
            tmpdir,
            '[options]\n'
            'zip_safe = True\n'
            'include_package_data = yes\n'
            'package_dir = b=c, =src\n'
            'packages = pack_a, pack_b.subpack\n'
            'namespace_packages = pack1, pack2\n'
            'scripts = bin/one.py, bin/two.py\n'
            'eager_resources = bin/one.py, bin/two.py\n'
            'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n'
            'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
            'dependency_links = http://some.com/here/1, '
            'http://some.com/there/2\n'
            'python_requires = >=1.0, !=2.8\n'
            'py_modules = module1, module2\n',
        )
        # ``namespace_packages`` is deprecated; the warning is expected.
        deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
        with deprec, get_dist(tmpdir) as dist:
            assert dist.zip_safe
            assert dist.include_package_data
            assert dist.package_dir == {'': 'src', 'b': 'c'}
            assert dist.packages == ['pack_a', 'pack_b.subpack']
            assert dist.namespace_packages == ['pack1', 'pack2']
            assert dist.scripts == ['bin/one.py', 'bin/two.py']
            assert dist.dependency_links == ([
                'http://some.com/here/1',
                'http://some.com/there/2',
            ])
            assert dist.install_requires == ([
                'docutils>=0.3',
                'pack==1.1,==1.3',
                'hey',
            ])
            assert dist.setup_requires == ([
                'docutils>=0.3',
                'spack ==1.1, ==1.3',
                'there',
            ])
            assert dist.python_requires == '>=1.0, !=2.8'
            assert dist.py_modules == ['module1', 'module2']
    def test_multiline(self, tmpdir):
        """Multiline (one item per line) values parse the same as comma lists."""
        fake_env(
            tmpdir,
            '[options]\n'
            'package_dir = \n'
            '    b=c\n'
            '    =src\n'
            'packages = \n'
            '    pack_a\n'
            '    pack_b.subpack\n'
            'namespace_packages = \n'
            '    pack1\n'
            '    pack2\n'
            'scripts = \n'
            '    bin/one.py\n'
            '    bin/two.py\n'
            'eager_resources = \n'
            '    bin/one.py\n'
            '    bin/two.py\n'
            'install_requires = \n'
            '    docutils>=0.3\n'
            '    pack ==1.1, ==1.3\n'
            '    hey\n'
            'setup_requires = \n'
            '    docutils>=0.3\n'
            '    spack ==1.1, ==1.3\n'
            '    there\n'
            'dependency_links = \n'
            '    http://some.com/here/1\n'
            '    http://some.com/there/2\n',
        )
        # ``namespace_packages`` is deprecated; the warning is expected.
        deprec = pytest.warns(SetuptoolsDeprecationWarning, match="namespace_packages")
        with deprec, get_dist(tmpdir) as dist:
            assert dist.package_dir == {'': 'src', 'b': 'c'}
            assert dist.packages == ['pack_a', 'pack_b.subpack']
            assert dist.namespace_packages == ['pack1', 'pack2']
            assert dist.scripts == ['bin/one.py', 'bin/two.py']
            assert dist.dependency_links == ([
                'http://some.com/here/1',
                'http://some.com/there/2',
            ])
            assert dist.install_requires == ([
                'docutils>=0.3',
                'pack==1.1,==1.3',
                'hey',
            ])
            assert dist.setup_requires == ([
                'docutils>=0.3',
                'spack ==1.1, ==1.3',
                'there',
            ])
def test_package_dir_fail(self, tmpdir):
fake_env(tmpdir, '[options]\npackage_dir = a b\n')
with get_dist(tmpdir, parse=False) as dist:
with pytest.raises(DistutilsOptionError):
dist.parse_config_files()
    def test_package_data(self, tmpdir):
        """``package_data``/``exclude_package_data`` sections parse into dicts
        mapping package names ('' = all packages) to lists of glob patterns."""
        fake_env(
            tmpdir,
            '[options.package_data]\n'
            '* = *.txt, *.rst\n'
            'hello = *.msg\n'
            '\n'
            '[options.exclude_package_data]\n'
            '* = fake1.txt, fake2.txt\n'
            'hello = *.dat\n',
        )
        with get_dist(tmpdir) as dist:
            assert dist.package_data == {
                '': ['*.txt', '*.rst'],
                'hello': ['*.msg'],
            }
            assert dist.exclude_package_data == {
                '': ['fake1.txt', 'fake2.txt'],
                'hello': ['*.dat'],
            }
def test_packages(self, tmpdir):
fake_env(tmpdir, '[options]\npackages = find:\n')
with get_dist(tmpdir) as dist:
assert dist.packages == ['fake_package']
    def test_find_directive(self, tmpdir):
        """``packages = find:`` honors ``[options.packages.find]`` options
        (``where``/``include``/``exclude``)."""
        dir_package, config = fake_env(tmpdir, '[options]\npackages = find:\n')
        make_package_dir('sub_one', dir_package)
        make_package_dir('sub_two', dir_package)
        # No find section: everything under the package dir is discovered.
        with get_dist(tmpdir) as dist:
            assert set(dist.packages) == set([
                'fake_package',
                'fake_package.sub_two',
                'fake_package.sub_one',
            ])
        # ``include`` whitelists; non-matching entries ('two') are ignored.
        config.write(
            '[options]\n'
            'packages = find:\n'
            '\n'
            '[options.packages.find]\n'
            'where = .\n'
            'include =\n'
            ' fake_package.sub_one\n'
            ' two\n'
        )
        with get_dist(tmpdir) as dist:
            assert dist.packages == ['fake_package.sub_one']
        # ``exclude`` removes matching packages from the discovered set.
        config.write(
            '[options]\n'
            'packages = find:\n'
            '\n'
            '[options.packages.find]\n'
            'exclude =\n'
            ' fake_package.sub_one\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two'])
    def test_find_namespace_directive(self, tmpdir):
        """``packages = find_namespace:`` also discovers packages without an
        ``__init__.py`` (PEP 420 namespace packages)."""
        dir_package, config = fake_env(
            tmpdir, '[options]\npackages = find_namespace:\n'
        )
        make_package_dir('sub_one', dir_package)
        # ``ns=True`` creates the dir without __init__.py.
        make_package_dir('sub_two', dir_package, ns=True)
        with get_dist(tmpdir) as dist:
            assert set(dist.packages) == {
                'fake_package',
                'fake_package.sub_two',
                'fake_package.sub_one',
            }
        # ``include`` whitelists; non-matching entries ('two') are ignored.
        config.write(
            '[options]\n'
            'packages = find_namespace:\n'
            '\n'
            '[options.packages.find]\n'
            'where = .\n'
            'include =\n'
            ' fake_package.sub_one\n'
            ' two\n'
        )
        with get_dist(tmpdir) as dist:
            assert dist.packages == ['fake_package.sub_one']
        # ``exclude`` removes matching packages from the discovered set.
        config.write(
            '[options]\n'
            'packages = find_namespace:\n'
            '\n'
            '[options.packages.find]\n'
            'exclude =\n'
            ' fake_package.sub_one\n'
        )
        with get_dist(tmpdir) as dist:
            assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'}
    def test_extras_require(self, tmpdir):
        """Extras parse both inline (';'-separated) and multi-line values, and
        each extra is registered in ``metadata.provides_extras``."""
        fake_env(
            tmpdir,
            '[options.extras_require]\n'
            'pdf = ReportLab>=1.2; RXP\n'
            'rest = \n'
            ' docutils>=0.3\n'
            ' pack ==1.1, ==1.3\n',
        )
        with get_dist(tmpdir) as dist:
            assert dist.extras_require == {
                'pdf': ['ReportLab>=1.2', 'RXP'],
                'rest': ['docutils>=0.3', 'pack==1.1,==1.3'],
            }
            assert set(dist.metadata.provides_extras) == {'pdf', 'rest'}
@pytest.mark.parametrize(
"config",
[
"[options.extras_require]\nfoo = bar;python_version<'3'",
"[options.extras_require]\nfoo = bar;os_name=='linux'",
"[options.extras_require]\nfoo = bar;python_version<'3'\n",
"[options.extras_require]\nfoo = bar;os_name=='linux'\n",
"[options]\ninstall_requires = bar;python_version<'3'",
"[options]\ninstall_requires = bar;os_name=='linux'",
"[options]\ninstall_requires = bar;python_version<'3'\n",
"[options]\ninstall_requires = bar;os_name=='linux'\n",
],
)
def test_raises_accidental_env_marker_misconfig(self, config, tmpdir):
fake_env(tmpdir, config)
match = (
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
"looks like a valid environment marker.*"
)
with pytest.raises(InvalidRequirement, match=match):
with get_dist(tmpdir) as _:
pass
@pytest.mark.parametrize(
"config",
[
"[options.extras_require]\nfoo = bar;python_version<3",
"[options.extras_require]\nfoo = bar;python_version<3\n",
"[options]\ninstall_requires = bar;python_version<3",
"[options]\ninstall_requires = bar;python_version<3\n",
],
)
def test_warn_accidental_env_marker_misconfig(self, config, tmpdir):
fake_env(tmpdir, config)
match = (
r"One of the parsed requirements in `(install_requires|extras_require.+)` "
"looks like a valid environment marker.*"
)
with pytest.warns(SetuptoolsDeprecationWarning, match=match):
with get_dist(tmpdir) as _:
pass
    @pytest.mark.parametrize(
        "config",
        [
            "[options.extras_require]\nfoo =\n bar;python_version<'3'",
            "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy",
            "[options.extras_require]\nfoo =\n bar;python_version<'3'\n",
            "[options.extras_require]\nfoo = bar;baz\nboo = xxx;yyy\n",
            "[options.extras_require]\nfoo =\n bar\n python_version<3\n",
            "[options]\ninstall_requires =\n bar;python_version<'3'",
            "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy",
            "[options]\ninstall_requires =\n bar;python_version<'3'\n",
            "[options]\ninstall_requires = bar;baz\nboo = xxx;yyy\n",
            "[options]\ninstall_requires =\n bar\n python_version<3\n",
        ],
    )
    @pytest.mark.filterwarnings("error::setuptools.SetuptoolsDeprecationWarning")
    def test_nowarn_accidental_env_marker_misconfig(self, config, tmpdir, recwarn):
        """Legitimate requirement lists that merely contain ';' must not
        trigger the accidental-env-marker warning (escalated to error above)."""
        fake_env(tmpdir, config)
        num_warnings = len(recwarn)
        with get_dist(tmpdir) as _:
            pass
        # The examples are valid, no warnings shown
        assert len(recwarn) == num_warnings
def test_dash_preserved_extras_require(self, tmpdir):
fake_env(tmpdir, '[options.extras_require]\nfoo-a = foo\nfoo_b = test\n')
with get_dist(tmpdir) as dist:
assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']}
    def test_entry_points(self, tmpdir):
        """Entry points parse from ``[options.entry_points]`` and can also be
        loaded verbatim from a file via the ``file:`` directive."""
        _, config = fake_env(
            tmpdir,
            '[options.entry_points]\n'
            'group1 = point1 = pack.module:func, '
            '.point2 = pack.module2:func_rest [rest]\n'
            'group2 = point3 = pack.module:func2\n',
        )
        with get_dist(tmpdir) as dist:
            assert dist.entry_points == {
                'group1': [
                    'point1 = pack.module:func',
                    '.point2 = pack.module2:func_rest [rest]',
                ],
                'group2': ['point3 = pack.module:func2'],
            }
        expected = (
            '[blogtool.parsers]\n'
            '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n'
        )
        tmpdir.join('entry_points').write(expected)
        # From file.
        config.write('[options]\nentry_points = file: entry_points\n')
        with get_dist(tmpdir) as dist:
            # File contents are kept as-is (a string, not parsed into a dict).
            assert dist.entry_points == expected
    def test_case_sensitive_entry_points(self, tmpdir):
        """Entry-point group names keep their original case (e.g. 'GROUP1')."""
        fake_env(
            tmpdir,
            '[options.entry_points]\n'
            'GROUP1 = point1 = pack.module:func, '
            '.point2 = pack.module2:func_rest [rest]\n'
            'group2 = point3 = pack.module:func2\n',
        )
        with get_dist(tmpdir) as dist:
            assert dist.entry_points == {
                'GROUP1': [
                    'point1 = pack.module:func',
                    '.point2 = pack.module2:func_rest [rest]',
                ],
                'group2': ['point3 = pack.module:func2'],
            }
def test_data_files(self, tmpdir):
fake_env(
tmpdir,
'[options.data_files]\n'
'cfg =\n'
' a/b.conf\n'
' c/d.conf\n'
'data = e/f.dat, g/h.dat\n',
)
with get_dist(tmpdir) as dist:
expected = [
('cfg', ['a/b.conf', 'c/d.conf']),
('data', ['e/f.dat', 'g/h.dat']),
]
assert sorted(dist.data_files) == sorted(expected)
def test_data_files_globby(self, tmpdir):
fake_env(
tmpdir,
'[options.data_files]\n'
'cfg =\n'
' a/b.conf\n'
' c/d.conf\n'
'data = *.dat\n'
'icons = \n'
' *.ico\n'
'audio = \n'
' *.wav\n'
' sounds.db\n',
)
# Create dummy files for glob()'s sake:
tmpdir.join('a.dat').write('')
tmpdir.join('b.dat').write('')
tmpdir.join('c.dat').write('')
tmpdir.join('a.ico').write('')
tmpdir.join('b.ico').write('')
tmpdir.join('c.ico').write('')
tmpdir.join('beep.wav').write('')
tmpdir.join('boop.wav').write('')
tmpdir.join('sounds.db').write('')
with get_dist(tmpdir) as dist:
expected = [
('cfg', ['a/b.conf', 'c/d.conf']),
('data', ['a.dat', 'b.dat', 'c.dat']),
('icons', ['a.ico', 'b.ico', 'c.ico']),
('audio', ['beep.wav', 'boop.wav', 'sounds.db']),
]
assert sorted(dist.data_files) == sorted(expected)
def test_python_requires_simple(self, tmpdir):
fake_env(
tmpdir,
DALS(
"""
[options]
python_requires=>=2.7
"""
),
)
with get_dist(tmpdir) as dist:
dist.parse_config_files()
def test_python_requires_compound(self, tmpdir):
fake_env(
tmpdir,
DALS(
"""
[options]
python_requires=>=2.7,!=3.0.*
"""
),
)
with get_dist(tmpdir) as dist:
dist.parse_config_files()
def test_python_requires_invalid(self, tmpdir):
fake_env(
tmpdir,
DALS(
"""
[options]
python_requires=invalid
"""
),
)
with pytest.raises(Exception):
with get_dist(tmpdir) as dist:
dist.parse_config_files()
    def test_cmdclass(self, tmpdir):
        """``cmdclass`` entries resolve dotted names to classes, importing the
        module from the (auto-discovered) ``src`` tree."""
        module_path = Path(tmpdir, "src/custom_build.py")  # auto discovery for src
        module_path.parent.mkdir(parents=True, exist_ok=True)
        module_path.write_text(
            "from distutils.core import Command\nclass CustomCmd(Command): pass\n",
            encoding="utf-8",
        )
        setup_cfg = """
            [options]
            cmdclass =
                customcmd = custom_build.CustomCmd
        """
        fake_env(tmpdir, inspect.cleandoc(setup_cfg))
        with get_dist(tmpdir) as dist:
            cmdclass = dist.cmdclass['customcmd']
            assert cmdclass.__name__ == "CustomCmd"
            assert cmdclass.__module__ == "custom_build"
            # The class must come from the file we just wrote, not elsewhere.
            assert module_path.samefile(inspect.getfile(cmdclass))
    def test_requirements_file(self, tmpdir):
        """``file:`` directives load requirements from external files, with
        surrounding blank lines stripped."""
        fake_env(
            tmpdir,
            DALS(
                """
                [options]
                install_requires = file:requirements.txt
                [options.extras_require]
                colors = file:requirements-extra.txt
                """
            ),
        )
        tmpdir.join('requirements.txt').write('\ndocutils>=0.3\n\n')
        tmpdir.join('requirements-extra.txt').write('colorama')
        with get_dist(tmpdir) as dist:
            assert dist.install_requires == ['docutils>=0.3']
            assert dist.extras_require == {'colors': ['colorama']}
saved_dist_init = _Distribution.__init__  # keep the real init so the wrapper below can delegate


class TestExternalSetters:
    # During creation of the setuptools Distribution() object, we call
    # the init of the parent distutils Distribution object via
    # _Distribution.__init__ ().
    #
    # It's possible distutils calls out to various keyword
    # implementations (i.e. distutils.setup_keywords entry points)
    # that may set a range of variables.
    #
    # This wraps distutil's Distribution.__init__ and simulates
    # pbr or something else setting these values.
    def _fake_distribution_init(self, dist, attrs):
        """Delegate to the real init, then mutate metadata the way an external
        plugin (e.g. pbr) would during initialization."""
        saved_dist_init(dist, attrs)
        # see self._DISTUTILS_UNSUPPORTED_METADATA
        dist.metadata.long_description_content_type = 'text/something'
        # Test overwrite setup() args
        dist.metadata.project_urls = {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

    @patch.object(_Distribution, '__init__', autospec=True)
    def test_external_setters(self, mock_parent_init, tmpdir):
        """Values set externally during init win over ``setup()`` arguments."""
        mock_parent_init.side_effect = self._fake_distribution_init
        dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}})
        assert dist.metadata.long_description_content_type == 'text/something'
        assert dist.metadata.project_urls == {
            'Link One': 'https://example.com/one/',
            'Link Two': 'https://example.com/two/',
        }

View File

@@ -0,0 +1,131 @@
import contextlib
import io
import os
import shutil
import site
import sys
import tempfile
from filelock import FileLock
@contextlib.contextmanager
def tempdir(cd=lambda dir: None, **kwargs):
    """Yield a fresh temporary directory, removing it on exit.

    ``cd`` is called with the new directory on entry and with the previous
    working directory on exit (the default does nothing).
    """
    created = tempfile.mkdtemp(**kwargs)
    previous = os.getcwd()
    try:
        cd(created)
        yield created
    finally:
        cd(previous)
        shutil.rmtree(created)
@contextlib.contextmanager
def environment(**replacements):
    """
    In a context, patch the environment with replacements. Pass None values
    to clear the values.

    Yields the dict of original values for the replaced keys.
    """
    saved = {key: os.environ[key] for key in replacements if key in os.environ}
    # A None value means "unset this variable for the duration".
    null_keys = [key for (key, value) in replacements.items() if value is None]
    for key in null_keys:
        os.environ.pop(key, None)
        replacements.pop(key)
    os.environ.update(replacements)
    try:
        yield saved
    finally:
        for key in replacements:
            os.environ.pop(key, None)
        os.environ.update(saved)
@contextlib.contextmanager
def quiet():
    """
    Redirect stdout/stderr to StringIO objects to prevent console output from
    distutils commands.

    Yields the two capture buffers, rewound to the start on exit.
    """
    saved_streams = sys.stdout, sys.stderr
    captured_out = sys.stdout = io.StringIO()
    captured_err = sys.stderr = io.StringIO()
    try:
        yield captured_out, captured_err
    finally:
        captured_out.seek(0)
        captured_err.seek(0)
        sys.stdout, sys.stderr = saved_streams
@contextlib.contextmanager
def save_user_site_setting():
    """Snapshot ``site.ENABLE_USER_SITE`` and restore it on exit."""
    previous = site.ENABLE_USER_SITE
    try:
        yield previous
    finally:
        site.ENABLE_USER_SITE = previous
@contextlib.contextmanager
def suppress_exceptions(*excs):
    """Silently swallow any of the given exception types raised in the body."""
    with contextlib.suppress(*excs):
        yield
def multiproc(request):
    """
    Return True if running under xdist and multiple
    workers are used.
    """
    try:
        worker = request.getfixturevalue('worker_id')
    except Exception:
        # No xdist plugin available -> single process.
        return False
    return worker != 'master'
@contextlib.contextmanager
def session_locked_tmp_dir(request, tmp_path_factory, name):
    """Uses a file lock to guarantee only one worker can access a temp dir"""
    # get the temp directory shared by all workers
    # (under xdist each worker has its own basetemp; its parent is shared)
    base = tmp_path_factory.getbasetemp()
    shared_dir = base.parent if multiproc(request) else base
    locked_dir = shared_dir / name
    with FileLock(locked_dir.with_suffix(".lock")):
        # ^-- prevent multiple workers to access the directory at once
        locked_dir.mkdir(exist_ok=True, parents=True)
        yield locked_dir
@contextlib.contextmanager
def save_paths():
    """Make sure ``sys.path``, ``sys.meta_path`` and ``sys.path_hooks`` are preserved"""
    snapshot = (sys.path[:], sys.meta_path[:], sys.path_hooks[:])
    try:
        yield
    finally:
        sys.path, sys.meta_path, sys.path_hooks = snapshot
@contextlib.contextmanager
def save_sys_modules():
    """Make sure initial ``sys.modules`` is preserved"""
    original = sys.modules
    try:
        # Work on a shallow copy; restore the original mapping afterwards.
        sys.modules = original.copy()
        yield
    finally:
        sys.modules = original

View File

@@ -0,0 +1,95 @@
import os
import subprocess
import sys
import unicodedata
from subprocess import PIPE as _PIPE, Popen as _Popen
import jaraco.envs
class VirtualEnv(jaraco.envs.VirtualEnv):
    """Test virtualenv wrapper that runs commands with a sanitized env."""

    name = '.env'
    # Some version of PyPy will import distutils on startup, implicitly
    # importing setuptools, and thus leading to BackendInvalid errors
    # when upgrading Setuptools. Bypass this behavior by avoiding the
    # early availability and need to upgrade.
    create_opts = ['--no-setuptools']

    def run(self, cmd, *args, **kwargs):
        """Run ``cmd`` inside the venv and return its captured output."""
        cmd = [self.exe(cmd[0])] + cmd[1:]
        kwargs = {"cwd": self.root, "encoding": "utf-8", **kwargs}  # Allow overriding
        # In some environments (eg. downstream distro packaging), where:
        # - tox isn't used to run tests and
        # - PYTHONPATH is set to point to a specific setuptools codebase and
        # - no custom env is explicitly set by a test
        # PYTHONPATH will leak into the spawned processes.
        # In that case tests look for module in the wrong place (on PYTHONPATH).
        # Unless the test sets its own special env, pass a copy of the existing
        # environment with removed PYTHONPATH to the subprocesses.
        if "env" not in kwargs:
            env = dict(os.environ)
            if "PYTHONPATH" in env:
                del env["PYTHONPATH"]
            kwargs["env"] = env
        return subprocess.check_output(cmd, *args, **kwargs)
def _which_dirs(cmd):
result = set()
for path in os.environ.get('PATH', '').split(os.pathsep):
filename = os.path.join(path, cmd)
if os.access(filename, os.X_OK):
result.add(path)
return result
def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None):
    """
    Execution command for tests, separate from those used by the
    code directly to prevent accidental behavior issues

    Runs ``python setup.py <cmd>`` and returns ``(returncode, output)``.
    ``data_stream`` selects which stream(s) to return: 0=stdout, 1=stderr,
    or a tuple used as a slice over ``(stdout, stderr)``.
    Returns ``(1, '')`` if the process cannot be spawned at all.
    """
    if env is None:
        env = dict()
        for envname in os.environ:
            env[envname] = os.environ[envname]

    # override the python path if needed
    if pypath is not None:
        env["PYTHONPATH"] = pypath

    # override the execution path if needed
    if path is not None:
        env["PATH"] = path
    if not env.get("PATH", ""):
        # fall back to the directories that contain basic archive tools
        env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip"))
        env["PATH"] = os.pathsep.join(env["PATH"])

    cmd = [sys.executable, "setup.py"] + list(cmd)

    # https://bugs.python.org/issue8557
    shell = sys.platform == 'win32'

    try:
        proc = _Popen(
            cmd,
            stdout=_PIPE,
            stderr=_PIPE,
            shell=shell,
            env=env,
            encoding="utf-8",
        )

        if isinstance(data_stream, tuple):
            data_stream = slice(*data_stream)
        data = proc.communicate()[data_stream]
    except OSError:
        return 1, ''

    # decode the console string if needed
    if hasattr(data, "decode"):
        # use the default encoding
        data = data.decode()
        data = unicodedata.normalize('NFC', data)

    # communicate calls wait()
    return proc.returncode, data

View File

@@ -0,0 +1,392 @@
import contextlib
import io
import os
import subprocess
import sys
import tarfile
import time
from pathlib import Path
import jaraco.path
import path
import pytest
from setuptools._normalization import safer_name
from . import contexts, environment
from .textwrap import DALS
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    with contextlib.ExitStack() as stack:
        monkeypatch.setattr('site.USER_BASE', stack.enter_context(contexts.tempdir()))
        monkeypatch.setattr('site.USER_SITE', stack.enter_context(contexts.tempdir()))
        stack.enter_context(contexts.save_user_site_setting())
        yield
@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Run the test with ``tmpdir`` as the current working directory."""
    with tmpdir.as_cwd() as original_dir:
        yield original_dir
@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376

    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if not request.config.pluginmanager.has_plugin('xdist'):
        return
    try:
        sys.path.remove('')
    except ValueError:
        pass
@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    clone_cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(clone_cmd, cwd=str(tmp_path))
    except Exception:
        # Offline or git missing: the dependent tests cannot run.
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
@pytest.fixture
def sample_project_cwd(sample_project):
    """Run the test from inside the freshly cloned ``sample_project`` checkout."""
    with path.Path(sample_project):
        yield
# sdist and wheel artifacts should be stable across a round of tests
# so we can build them once per session and use the files as "readonly"
# In the case of setuptools, building the wheel without sdist may cause
# it to contain the `build` directory, and therefore create situations with
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
def _build_distributions(tmp_path_factory, request):
    """Build (or reuse) the setuptools sdist and wheel in a session-shared,
    lock-protected directory; return ``(sdist_path, wheel_path)``."""
    with contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    ) as tmp:  # pragma: no cover
        # Reuse artifacts already built by another worker in this session.
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        subprocess.check_output([
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ])

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Session-scoped path to the setuptools sdist under test."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()
    return _build_distributions(tmp_path_factory, request)[0]
@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Session-scoped path to the setuptools wheel under test."""
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()
    return _build_distributions(tmp_path_factory, request)[1]
@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv')
    env.create_opts = ['--no-setuptools', '--wheel=bundle']
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    # ``req`` is the requirement installed into the fresh venv.
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()
@pytest.fixture
def venv_without_setuptools(tmp_path):
    """Virtual env without any version of setuptools installed"""
    bare = environment.VirtualEnv()
    bare.root = path.Path(tmp_path / 'venv_without_setuptools')
    bare.create_opts = ['--no-setuptools', '--no-wheel']
    bare.ensure_env()
    return bare
@pytest.fixture
def bare_venv(tmp_path):
    """Virtual env without any common packages installed"""
    stripped = environment.VirtualEnv()
    stripped.root = path.Path(tmp_path / 'bare_venv')
    stripped.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
    stripped.ensure_env()
    return stripped
def make_sdist(dist_path, files):
    """
    Create a simple sdist tarball at dist_path, containing the files
    listed in ``files`` as ``(filename, content)`` tuples.
    """
    # Distributions with only one file don't play well with pip.
    assert len(files) > 1
    with tarfile.open(dist_path, 'w:gz') as dist:
        for filename, content in files:
            payload = content.encode('utf-8')
            member = tarfile.TarInfo(name=filename)
            member.size = len(payload)
            member.mtime = int(time.time())
            dist.addfile(member, fileobj=io.BytesIO(payload))
def make_trivial_sdist(dist_path, distname, version):
    """
    Create a simple sdist tarball at dist_path, containing just a simple
    setup.py.
    """
    make_sdist(
        dist_path,
        [
            (
                'setup.py',
                DALS(
                    f"""\
                    import setuptools
                    setuptools.setup(
                        name={distname!r},
                        version={version!r}
                    )
                    """
                ),
            ),
            # Second (empty) file: single-file sdists don't play well with pip.
            ('setup.cfg', ''),
        ],
    )
def make_nspkg_sdist(dist_path, distname, version):
    """
    Make an sdist tarball with distname and version which also contains one
    package with the same name as distname. The top-level package is
    designated a namespace package).
    """
    # A dotted distname is required so there is a top-level namespace package.
    assert '.' in distname

    parts = distname.split('.')
    nspackage = parts[0]
    packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)]

    setup_py = DALS(
        f"""\
        import setuptools
        setuptools.setup(
            name={distname!r},
            version={version!r},
            packages={packages!r},
            namespace_packages=[{nspackage!r}]
        )
        """
    )

    init = "__import__('pkg_resources').declare_namespace(__name__)"

    files = [('setup.py', setup_py), (os.path.join(nspackage, '__init__.py'), init)]
    files.extend(
        (os.path.join(*package.split('.'), '__init__.py'), '')
        for package in packages[1:]
    )

    make_sdist(dist_path, files)
def make_python_requires_sdist(dist_path, distname, version, python_requires):
    """Create a trivial sdist whose setup.py declares ``python_requires``."""
    setup_py = DALS(
        f"""\
        import setuptools
        setuptools.setup(
            name={distname!r},
            version={version!r},
            python_requires={python_requires!r},
        )
        """
    )
    make_sdist(dist_path, [('setup.py', setup_py), ('setup.cfg', '')])
def create_setup_requires_package(
    path,
    distname='foobar',
    version='0.1',
    make_package=make_trivial_sdist,
    setup_py_template=None,
    setup_attrs=None,
    use_setup_cfg=(),
):
    """Creates a source tree under path for a trivial test package that has a
    single requirement in setup_requires--a tarball for that requirement is
    also created and added to the dependency_links argument.

    ``distname`` and ``version`` refer to the name/version of the package that
    the test package requires via ``setup_requires``. The name of the test
    package itself is just 'test_pkg'.

    ``use_setup_cfg`` lists the attribute names to move from setup.py into
    setup.cfg. Returns the path of the created 'test_pkg' directory.
    """
    # PEP 503-style normalization so the sdist filename matches pip's lookup.
    normalized_distname = safer_name(distname)
    test_setup_attrs = {
        'name': 'test_pkg',
        'version': '0.0',
        'setup_requires': [f'{normalized_distname}=={version}'],
        'dependency_links': [os.path.abspath(path)],
    }
    if setup_attrs:
        test_setup_attrs.update(setup_attrs)

    test_pkg = os.path.join(path, 'test_pkg')
    os.mkdir(test_pkg)

    # setup.cfg
    if use_setup_cfg:
        options = []
        metadata = []
        for name in use_setup_cfg:
            value = test_setup_attrs.pop(name)
            # 'name' and 'version' belong in [metadata]; everything else
            # goes into [options].
            if name in 'name version'.split():
                section = metadata
            else:
                section = options
            if isinstance(value, (tuple, list)):
                value = ';'.join(value)
            section.append(f'{name}: {value}')
        test_setup_cfg_contents = DALS(
            """
            [metadata]
            {metadata}
            [options]
            {options}
            """
        ).format(
            options='\n'.join(options),
            metadata='\n'.join(metadata),
        )
    else:
        test_setup_cfg_contents = ''
    with open(os.path.join(test_pkg, 'setup.cfg'), 'w', encoding="utf-8") as f:
        f.write(test_setup_cfg_contents)

    # setup.py
    if setup_py_template is None:
        setup_py_template = DALS(
            """\
            import setuptools
            setuptools.setup(**%r)
            """
        )
    with open(os.path.join(test_pkg, 'setup.py'), 'w', encoding="utf-8") as f:
        f.write(setup_py_template % test_setup_attrs)

    # Build the sdist that satisfies the setup_requires requirement.
    foobar_path = os.path.join(path, f'{normalized_distname}-{version}.tar.gz')
    make_package(foobar_path, distname, version)

    return test_pkg
@pytest.fixture
def pbr_package(tmp_path, monkeypatch, venv):
    """Build a minimal pbr-based project tree and return its directory.

    ``PBR_VERSION`` is set so pbr does not need VCS metadata for the version.
    """
    files = {
        "pyproject.toml": DALS(
            """
            [build-system]
            requires = ["setuptools"]
            build-backend = "setuptools.build_meta"
            """
        ),
        "setup.py": DALS(
            """
            __import__('setuptools').setup(
                pbr=True,
                setup_requires=["pbr"],
            )
            """
        ),
        "setup.cfg": DALS(
            """
            [metadata]
            name = mypkg

            [files]
            packages =
                mypkg
            """
        ),
        "mypkg": {
            "__init__.py": "",
            "hello.py": "print('Hello world!')",
        },
        "other": {"test.txt": "Another file in here."},
    }
    # pbr must be importable in the venv that will build/install this package.
    venv.run(["python", "-m", "pip", "install", "pbr"])
    prefix = tmp_path / 'mypkg'
    prefix.mkdir()
    jaraco.path.build(files, prefix=prefix)
    monkeypatch.setenv('PBR_VERSION', "0.42")
    return prefix

View File

@@ -0,0 +1,3 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a>
</body></html>

View File

@@ -0,0 +1,4 @@
<html><body>
<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/>
<a href="../../external.html" rel="homepage">external homepage</a><br/>
</body></html>

View File

@@ -0,0 +1,77 @@
"""Reusable functions and classes for different types of integration tests.
For example ``Archive`` can be used to check the contents of distribution built
with setuptools, and ``run`` will always try to be as verbose as possible to
facilitate debugging.
"""
import os
import subprocess
import tarfile
from pathlib import Path
from zipfile import ZipFile
def run(cmd, env=None):
    """Run ``cmd`` verbosely and return its combined stdout+stderr.

    Raises ``subprocess.CalledProcessError`` on a non-zero exit status.
    """
    merged_env = {**os.environ, **(env or {})}
    # ^-- allow overwriting instead of discarding the current env
    result = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        encoding="utf-8",
        env=merged_env,
    )
    combined = result.stdout + "\n" + result.stderr
    # pytest omits stdout/err by default, if the test fails they help debugging
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print(f"Command: {cmd}\nreturn code: {result.returncode}\n\n{combined}")
    if result.returncode != 0:
        raise subprocess.CalledProcessError(
            result.returncode, cmd, result.stdout, result.stderr
        )
    return combined
class Archive:
    """Compatibility layer for ZipFile/Info and TarFile/Info"""

    def __init__(self, filename):
        """Open ``filename`` as a ``tar.gz`` or ``zip`` archive.

        Raises:
            ValueError: if the extension is neither ``tar.gz`` nor ``zip``.
        """
        self._filename = filename
        if filename.endswith("tar.gz"):
            self._obj = tarfile.open(filename, "r:gz")
        elif filename.endswith("zip"):
            self._obj = ZipFile(filename)
        else:
            # BUG FIX: the message was an f-string with no placeholder that
            # hard-coded "(unknown)"; report the actual offending filename.
            raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz")

    def __iter__(self):
        """Iterate over the archive's ZipInfo/TarInfo members."""
        if hasattr(self._obj, "infolist"):
            return iter(self._obj.infolist())
        return iter(self._obj)

    def get_name(self, zip_or_tar_info):
        """Return the member's path inside the archive."""
        if hasattr(zip_or_tar_info, "filename"):
            return zip_or_tar_info.filename
        return zip_or_tar_info.name

    def get_content(self, zip_or_tar_info):
        """Return the member's content decoded as UTF-8 text.

        Raises:
            ValueError: for tar members that cannot be extracted as files.
        """
        if hasattr(self._obj, "extractfile"):
            content = self._obj.extractfile(zip_or_tar_info)
            if content is None:
                msg = f"Invalid {zip_or_tar_info.name} in {self._filename}"
                raise ValueError(msg)
            return str(content.read(), "utf-8")
        return str(self._obj.read(zip_or_tar_info), "utf-8")
def get_sdist_members(sdist_path):
    """Return the set of paths inside the sdist, with the root folder stripped."""
    with tarfile.open(sdist_path, "r:gz") as tar:
        names = tar.getnames()
    # remove root folder
    stripped = ("/".join(Path(name).parts[1:]) for name in names)
    return {member for member in stripped if member}
def get_wheel_members(wheel_path):
    """Return the set of file names contained in the wheel (a zip archive)."""
    with ZipFile(wheel_path) as archive:
        return set(archive.namelist())

View File

@@ -0,0 +1,20 @@
import subprocess
import pytest
@pytest.mark.uses_network
def test_pbr_integration(pbr_package, venv):
    """Ensure pbr packages install."""
    cmd = [
        'python',
        '-m',
        'pip',
        '-v',
        'install',
        # Build with the setuptools already present in the venv (the version
        # under test), rather than an isolated PEP 517 environment.
        '--no-build-isolation',
        pbr_package,
    ]
    venv.run(cmd, stderr=subprocess.STDOUT)
    # Importing the module executes its print; verify the output made it out.
    out = venv.run(["python", "-c", "import mypkg.hello"])
    assert "Hello world!" in out

View File

@@ -0,0 +1,223 @@
# https://github.com/python/mypy/issues/16936
# mypy: disable-error-code="has-type"
"""Integration tests for setuptools that focus on building packages via pip.
The idea behind these tests is not to exhaustively check all the possible
combinations of packages, operating systems, supporting libraries, etc, but
rather check a limited number of popular packages and how they interact with
the exposed public API. This way if any change in API is introduced, we hope to
identify backward compatibility problems before publishing a release.
The number of tested packages is purposefully kept small, to minimise duration
and the associated maintenance cost (changes in the way these packages define
their build process may require changes in the tests).
"""
import json
import os
import shutil
import sys
from enum import Enum
from glob import glob
from hashlib import md5
from urllib.request import urlopen
import pytest
from packaging.requirements import Requirement
from .helpers import Archive, run
pytestmark = pytest.mark.integration
(LATEST,) = Enum("v", "LATEST") # type: ignore[misc] # https://github.com/python/mypy/issues/16936
"""Default version to be checked"""
# There are positive and negative aspects of checking the latest version of the
# packages.
# The main positive aspect is that the latest version might have already
# removed the use of APIs deprecated in previous releases of setuptools.
# Packages to be tested:
# (Please notice the test environment cannot support EVERY library required for
# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume
# that `build-essential`, `gfortran` and `libopenblas-dev` are installed,
# due to their relevance to the numerical/scientific programming ecosystem)
EXAMPLES = [
("pip", LATEST), # just in case...
("pytest", LATEST), # uses setuptools_scm
("mypy", LATEST), # custom build_py + ext_modules
# --- Popular packages: https://hugovk.github.io/top-pypi-packages/ ---
("botocore", LATEST),
("kiwisolver", LATEST), # build_ext
("brotli", LATEST), # not in the list but used by urllib3
("pyyaml", LATEST), # cython + custom build_ext + custom distclass
("charset-normalizer", LATEST), # uses mypyc, used by aiohttp
("protobuf", LATEST),
# ("requests", LATEST), # XXX: https://github.com/psf/requests/pull/6920
("celery", LATEST),
# When adding packages to this list, make sure they expose a `__version__`
# attribute, or modify the tests below
]
# Some packages have "optional" dependencies that modify their build behaviour
# and are not listed in pyproject.toml, others still use `setup_requires`
EXTRA_BUILD_DEPS = {
"pyyaml": ("Cython<3.0",), # constraint to avoid errors
"charset-normalizer": ("mypy>=1.4.1",), # no pyproject.toml available
}
EXTRA_ENV_VARS = {
"pyyaml": {"PYYAML_FORCE_CYTHON": "1"},
"charset-normalizer": {"CHARSET_NORMALIZER_USE_MYPYC": "1"},
}
IMPORT_NAME = {
"pyyaml": "yaml",
"protobuf": "google.protobuf",
}
VIRTUALENV = (sys.executable, "-m", "virtualenv")
# By default, pip will try to build packages in isolation (PEP 517), which
# means it will download the previous stable version of setuptools.
# `pip` flags can avoid that (the version of setuptools under test
# should be the one to be used)
INSTALL_OPTIONS = (
"--ignore-installed",
"--no-build-isolation",
# Omit "--no-binary :all:" the sdist is supplied directly.
# Allows dependencies as wheels.
)
# The downside of `--no-build-isolation` is that pip will not download build
# dependencies. The test script will have to also handle that.
@pytest.fixture
def venv_python(tmp_path):
    """Create a virtualenv under ``tmp_path`` and return its python executable."""
    run([*VIRTUALENV, str(tmp_path / ".venv")])
    # The bin dir is 'bin' or 'Scripts' depending on the platform; glob both.
    possible_path = (str(p.parent) for p in tmp_path.glob(".venv/*/python*"))
    return shutil.which("python", path=os.pathsep.join(possible_path))
@pytest.fixture(autouse=True)
def _prepare(tmp_path, venv_python, monkeypatch):
    """Prepare the test environment and dump debug info on teardown.

    Ensures the download directory exists and sets build-related environment
    variables; after the test, prints the temp-dir contents and the
    virtualenv's installed packages to aid debugging from CI logs.
    """
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    os.makedirs(download_path, exist_ok=True)

    # Environment vars used for building some of the packages
    monkeypatch.setenv("USE_MYPYC", "1")

    yield

    # Let's provide the maximum amount of information possible in the case
    # it is necessary to debug the tests directly from the CI logs.
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    print("Temporary directory:")
    # BUG FIX: ``map(print, ...)`` is lazy and was never consumed, so the
    # listing never printed; iterate explicitly to emit it.
    for entry in tmp_path.glob("*"):
        print(entry)
    print("Virtual environment:")
    run([venv_python, "-m", "pip", "freeze"])
@pytest.mark.parametrize(("package", "version"), EXAMPLES)
@pytest.mark.uses_network
def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel):
    """Install a real-world sdist with the in-development setuptools and
    smoke-test the result by importing the installed package.
    """
    venv_pip = (venv_python, "-m", "pip")
    sdist = retrieve_sdist(package, version, tmp_path)
    # Build deps must be installed manually: --no-build-isolation below
    # prevents pip from resolving them on its own.
    deps = build_deps(package, sdist)
    if deps:
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Dependencies:", deps)
        run([*venv_pip, "install", *deps])

    # Use a virtualenv to simulate PEP 517 isolation
    # but install fresh setuptools wheel to ensure the version under development
    env = EXTRA_ENV_VARS.get(package, {})
    run([*venv_pip, "install", "--force-reinstall", setuptools_wheel])
    run([*venv_pip, "install", *INSTALL_OPTIONS, sdist], env)

    # Execute a simple script to make sure the package was installed correctly
    pkg = IMPORT_NAME.get(package, package).replace("-", "_")
    script = f"import {pkg}; print(getattr({pkg}, '__version__', 0))"
    run([venv_python, "-c", script])
# ---- Helper Functions ----
def retrieve_sdist(package, version, tmp_path):
    """Either use cached sdist file or download it from PyPI.

    Returns the local path of the sdist for ``package``/``version``.
    """
    # `pip download` cannot be used due to
    # https://github.com/pypa/pip/issues/1884
    # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686
    # We have to find the correct distribution file and download it
    download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path))
    dist = retrieve_pypi_sdist_metadata(package, version)

    # Remove old files to prevent cache to grow indefinitely
    for file in glob(os.path.join(download_path, f"{package}*")):
        # BUG FIX: ``file`` is a full path while ``dist["filename"]`` is a bare
        # file name, so the old direct comparison never matched and the freshly
        # cached sdist was deleted (and re-downloaded) on every run.
        if dist["filename"] != os.path.basename(file):
            os.unlink(file)

    dist_file = os.path.join(download_path, dist["filename"])
    if not os.path.exists(dist_file):
        download(dist["url"], dist_file, dist["md5_digest"])
    return dist_file
def retrieve_pypi_sdist_metadata(package, version):
    """Return the PyPI JSON metadata entry for the sdist of ``package``.

    ``version`` is either a concrete version string or the ``LATEST`` sentinel.
    Raises ``ValueError`` when the selected release was yanked.
    """
    # https://warehouse.pypa.io/api-reference/json.html
    id_ = package if version is LATEST else f"{package}/{version}"
    with urlopen(f"https://pypi.org/pypi/{id_}/json") as f:
        metadata = json.load(f)

    if metadata["info"]["yanked"]:
        raise ValueError(f"Release for {package} {version} was yanked")

    version = metadata["info"]["version"]
    # FIX (dead code): the previous conditional tested ``version is LATEST``
    # *after* ``version`` was rebound to a string above, so it always chose
    # ``metadata["urls"]``. That is also the correct source: the JSON
    # endpoint's ``urls`` entry lists the files of the requested release
    # (the latest one when no explicit version was given).
    release = metadata["urls"]
    (sdist,) = filter(lambda d: d["packagetype"] == "sdist", release)
    return sdist
def download(url, dest, md5_digest):
    """Download *url* into *dest*, verifying the payload's MD5 digest."""
    with urlopen(url) as response:
        payload = response.read()
    assert md5(payload).hexdigest() == md5_digest
    with open(dest, "wb") as out:
        out.write(payload)
    assert os.path.exists(dest)
def build_deps(package, sdist_file):
    """Find out what are the build dependencies for a package.

    "Manually" install them, since pip will not install build
    deps with `--no-build-isolation`.
    """
    # delay importing, since pytest discovery phase may hit this file from a
    # testenv without tomli
    from setuptools.compat.py310 import tomllib

    pyproject = tomllib.loads(_read_pyproject(Archive(sdist_file)))
    declared = list(pyproject.get("build-system", {}).get("requires", []))
    declared.extend(EXTRA_BUILD_DEPS.get(package, []))
    # Remove setuptools from requirements (and deduplicate)
    by_name = {Requirement(spec).name: spec for spec in declared}
    return [spec for name, spec in by_name.items() if name != "setuptools"]
def _read_pyproject(archive):
contents = (
archive.get_content(member)
for member in archive
if os.path.basename(archive.get_name(member)) == "pyproject.toml"
)
return next(contents, "")

View File

@@ -0,0 +1 @@
value = 'three, sir!'

View File

@@ -0,0 +1,90 @@
import ast
import json
import textwrap
from pathlib import Path
def iter_namespace_pkgs(namespace):
    """Yield each ancestor package of dotted *namespace*, outermost first."""
    accumulated = []
    for part in namespace.split("."):
        accumulated.append(part)
        yield ".".join(accumulated)
def build_namespace_package(tmpdir, name, version="1.0", impl="pkg_resources"):
    """Create a source tree for a legacy namespace package under *tmpdir*.

    *name* is a dotted module name (e.g. ``myns.pkgA``); every ancestor
    package gets an ``__init__.py`` in the style selected by *impl*
    (``"pkg_resources"`` declarations or ``"pkgutil"`` path extension) and
    the final component becomes a module defining ``name``.
    Returns the project root directory; raises ``ValueError`` for an
    unrecognised *impl*.
    """
    src_dir = tmpdir / name
    src_dir.mkdir()
    setup_py = src_dir / 'setup.py'
    namespace, _, rest = name.rpartition('.')
    namespaces = list(iter_namespace_pkgs(namespace))
    setup_args = {
        "name": name,
        "version": version,
        "packages": namespaces,
    }

    if impl == "pkg_resources":
        tmpl = '__import__("pkg_resources").declare_namespace(__name__)'
        setup_args["namespace_packages"] = namespaces
    elif impl == "pkgutil":
        tmpl = '__path__ = __import__("pkgutil").extend_path(__path__, __name__)'
    else:
        raise ValueError(f"Cannot recognise {impl=} when creating namespaces")

    # Serialize the arguments through JSON so the generated setup.py embeds
    # a plain literal; the literal_eval assert guards against invalid output.
    args = json.dumps(setup_args, indent=4)
    assert ast.literal_eval(args)  # ensure it is valid Python
    script = textwrap.dedent(
        """\
import setuptools
args = {args}
setuptools.setup(**args)
"""
    ).format(args=args)
    setup_py.write_text(script, encoding='utf-8')

    ns_pkg_dir = Path(src_dir, namespace.replace(".", "/"))
    ns_pkg_dir.mkdir(parents=True)

    # One __init__.py per namespace level, all using the same template.
    for ns in namespaces:
        pkg_init = src_dir / ns.replace(".", "/") / '__init__.py'
        pkg_init.write_text(tmpl, encoding='utf-8')

    pkg_mod = ns_pkg_dir / (rest + '.py')
    some_functionality = 'name = {rest!r}'.format(**locals())
    pkg_mod.write_text(some_functionality, encoding='utf-8')

    return src_dir
def build_pep420_namespace_package(tmpdir, name):
    """Create a project dir for an implicit (PEP 420) namespace package.

    *name* is a dotted module name; the last component becomes a module
    inside the (init-less) namespace directory. Returns the project root.
    """
    namespace, _, module_name = name.rpartition(".")
    project_dir = tmpdir / name
    project_dir.mkdir()
    pyproject_src = f"""\
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project]
name = "{name}"
version = "3.14159"
"""
    pyproject = project_dir / "pyproject.toml"
    pyproject.write_text(textwrap.dedent(pyproject_src), encoding='utf-8')
    package_dir = Path(project_dir, namespace.replace(".", "/"))
    package_dir.mkdir(parents=True)
    module = package_dir / (module_name + ".py")
    module.write_text(f"name = {module_name!r}", encoding='utf-8')
    return project_dir
def make_site_dir(target):
    """
    Add a sitecustomize.py module in target to cause
    target to be added to site dirs such that .pth files
    are processed there.
    """
    hook = target / 'sitecustomize.py'
    contents = '__import__("site").addsitedir({!r})'.format(str(target))
    hook.write_text(contents, encoding='utf-8')

View File

@@ -0,0 +1 @@
result = 'passed'

View File

@@ -0,0 +1,36 @@
import io
import tarfile
import pytest
from setuptools import archive_util
@pytest.fixture
def tarfile_with_unicode(tmpdir):
    """
    Create a tarfile containing only a file whose name is
    a zero byte file called testimäge.png.
    """
    buffer = io.BytesIO()
    with tarfile.open(fileobj=buffer, mode="w:gz") as archive:
        member = tarfile.TarInfo("testimäge.png")
        member.size = 0
        archive.addfile(member, io.BytesIO(b""))

    target = tmpdir / 'unicode-pkg-1.0.tar.gz'
    with open(str(target), mode='wb') as out:
        out.write(buffer.getvalue())
    return str(target)
@pytest.mark.xfail(reason="#710 and #712")
def test_unicode_files(tarfile_with_unicode, tmpdir):
    """Unpacking an archive with non-ASCII member names should succeed."""
    target = tmpdir / 'out'
    archive_util.unpack_archive(tarfile_with_unicode, str(target))

View File

@@ -0,0 +1,28 @@
"""develop tests"""
import sys
from unittest import mock
import pytest
from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution
@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
@pytest.mark.xfail(reason="bdist_rpm is long deprecated, should we remove it? #1988")
@mock.patch('distutils.command.bdist_rpm.bdist_rpm')
def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd):
    """Running ``bdist_rpm`` must emit SetuptoolsDeprecationWarning while
    still delegating to the (mocked) distutils implementation."""
    dist = Distribution(
        dict(
            script_name='setup.py',
            script_args=['bdist_rpm'],
            name='foo',
            py_modules=['hi'],
        )
    )
    dist.parse_command_line()
    with pytest.warns(SetuptoolsDeprecationWarning):
        dist.run_commands()

    distutils_cmd.run.assert_called_once()

View File

@@ -0,0 +1,73 @@
"""develop tests"""
import os
import re
import zipfile
import pytest
from setuptools.dist import Distribution
from . import contexts
# Minimal project used by the fixtures below: one pure module named ``hi``.
SETUP_PY = """\
from setuptools import setup
setup(py_modules=['hi'])
"""
@pytest.fixture
def setup_context(tmpdir):
    """Create a trivial project (setup.py + hi.py) in *tmpdir* and cd into it."""
    project_files = {'setup.py': SETUP_PY, 'hi.py': '1\n'}
    for filename, contents in project_files.items():
        with (tmpdir / filename).open('w') as fh:
            fh.write(contents)
    with tmpdir.as_cwd():
        yield tmpdir
class Test:
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_bdist_egg(self):
        """Building an egg produces a correctly-named file in ``dist/``."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg'],
                name='foo',
                py_modules=['hi'],
            )
        )
        os.makedirs(os.path.join('build', 'src'))
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()

        # let's see if we got our egg link at the right place
        [content] = os.listdir('dist')
        assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)

    @pytest.mark.xfail(
        os.environ.get('PYTHONDONTWRITEBYTECODE', False),
        reason="Byte code disabled",
    )
    @pytest.mark.usefixtures("user_override")
    @pytest.mark.usefixtures("setup_context")
    def test_exclude_source_files(self):
        """``--exclude-source-files`` ships only byte-code, never ``.py``."""
        dist = Distribution(
            dict(
                script_name='setup.py',
                script_args=['bdist_egg', '--exclude-source-files'],
                py_modules=['hi'],
            )
        )
        with contexts.quiet():
            dist.parse_command_line()
            dist.run_commands()
        [dist_name] = os.listdir('dist')
        dist_filename = os.path.join('dist', dist_name)
        # FIX: close the zip deterministically (the handle used to leak) and
        # avoid shadowing the ``zip`` builtin.
        with zipfile.ZipFile(dist_filename) as egg:
            names = [zi.filename for zi in egg.filelist]
        assert 'hi.pyc' in names
        assert 'hi.py' not in names

View File

@@ -0,0 +1,708 @@
from __future__ import annotations
import builtins
import importlib
import os.path
import platform
import shutil
import stat
import struct
import sys
import sysconfig
from contextlib import suppress
from inspect import cleandoc
from zipfile import ZipFile
import jaraco.path
import pytest
from packaging import tags
import setuptools
from setuptools.command.bdist_wheel import bdist_wheel, get_abi_tag
from setuptools.dist import Distribution
from setuptools.warnings import SetuptoolsDeprecationWarning
from distutils.core import run_setup
DEFAULT_FILES = {
"dummy_dist-1.0.dist-info/top_level.txt",
"dummy_dist-1.0.dist-info/METADATA",
"dummy_dist-1.0.dist-info/WHEEL",
"dummy_dist-1.0.dist-info/RECORD",
}
DEFAULT_LICENSE_FILES = {
"LICENSE",
"LICENSE.txt",
"LICENCE",
"LICENCE.txt",
"COPYING",
"COPYING.md",
"NOTICE",
"NOTICE.rst",
"AUTHORS",
"AUTHORS.txt",
}
OTHER_IGNORED_FILES = {
"LICENSE~",
"AUTHORS~",
}
SETUPPY_EXAMPLE = """\
from setuptools import setup
setup(
name='dummy_dist',
version='1.0',
)
"""
EXAMPLES = {
"dummy-dist": {
"setup.py": SETUPPY_EXAMPLE,
"licenses_dir": {"DUMMYFILE": ""},
**dict.fromkeys(DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES, ""),
},
"simple-dist": {
"setup.py": cleandoc(
"""
from setuptools import setup
setup(
name="simple.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
extras_require={"voting": ["beaglevote"]},
)
"""
),
"simpledist": "",
},
"complex-dist": {
"setup.py": cleandoc(
"""
from setuptools import setup
setup(
name="complex-dist",
version="0.1",
description="Another testing distribution \N{SNOWMAN}",
long_description="Another testing distribution \N{SNOWMAN}",
author="Illustrious Author",
author_email="illustrious@example.org",
url="http://example.org/exemplary",
packages=["complexdist"],
setup_requires=["setuptools"],
install_requires=["quux", "splort"],
extras_require={"simple": ["simple.dist"]},
entry_points={
"console_scripts": [
"complex-dist=complexdist:main",
"complex-dist2=complexdist:main",
],
},
)
"""
),
"complexdist": {"__init__.py": "def main(): return"},
},
"headers-dist": {
"setup.py": cleandoc(
"""
from setuptools import setup
setup(
name="headers.dist",
version="0.1",
description="A distribution with headers",
headers=["header.h"],
)
"""
),
"headersdist.py": "",
"header.h": "",
},
"commasinfilenames-dist": {
"setup.py": cleandoc(
"""
from setuptools import setup
setup(
name="testrepo",
version="0.1",
packages=["mypackage"],
description="A test package with commas in file names",
include_package_data=True,
package_data={"mypackage.data": ["*"]},
)
"""
),
"mypackage": {
"__init__.py": "",
"data": {"__init__.py": "", "1,2,3.txt": ""},
},
"testrepo-0.1.0": {
"mypackage": {"__init__.py": ""},
},
},
"unicode-dist": {
"setup.py": cleandoc(
"""
from setuptools import setup
setup(
name="unicode.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
packages=["unicodedist"],
zip_safe=True,
)
"""
),
"unicodedist": {"__init__.py": "", "åäö_日本語.py": ""},
},
"utf8-metadata-dist": {
"setup.cfg": cleandoc(
"""
[metadata]
name = utf8-metadata-dist
version = 42
author_email = "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
long_description = file: README.rst
"""
),
"README.rst": "UTF-8 描述 説明",
},
"licenses-dist": {
"setup.cfg": cleandoc(
"""
[metadata]
name = licenses-dist
version = 1.0
license_files = **/LICENSE
"""
),
"LICENSE": "",
"src": {
"vendor": {"LICENSE": ""},
},
},
}
# Registered conditionally: this project compiles a real abi3 extension.
if sys.platform != "win32":
    # ABI3 extensions don't really work on Windows
    EXAMPLES["abi3extension-dist"] = {
        "setup.py": cleandoc(
            """
            from setuptools import Extension, setup
            setup(
                name="extension.dist",
                version="0.1",
                description="A testing distribution \N{SNOWMAN}",
                ext_modules=[
                    Extension(
                        name="extension", sources=["extension.c"], py_limited_api=True
                    )
                ],
            )
            """
        ),
        "setup.cfg": "[bdist_wheel]\npy_limited_api=cp32",
        "extension.c": "#define Py_LIMITED_API 0x03020000\n#include <Python.h>",
    }
def bdist_wheel_cmd(**kwargs):
    """Run command in the same process so that it is easier to collect coverage"""
    if os.path.exists("setup.py"):
        dist_obj = run_setup("setup.py", stop_after="init")
    else:
        dist_obj = Distribution({"script_name": "%%build_meta%%"})
    dist_obj.parse_config_files()
    cmd = bdist_wheel(dist_obj)
    # Apply caller-provided command options before finalization.
    for option, value in kwargs.items():
        setattr(cmd, option, value)
    cmd.finalize_options()
    return cmd
def mkexample(tmp_path_factory, name):
    """Materialize the EXAMPLES[name] file tree in a fresh temp dir."""
    root = tmp_path_factory.mktemp(name)
    jaraco.path.build(EXAMPLES[name], prefix=str(root))
    return root
@pytest.fixture(scope="session")
def wheel_paths(tmp_path_factory):
    """Build a wheel from every EXAMPLES project; return sorted .whl paths."""
    build_base = tmp_path_factory.mktemp("build")
    dist_dir = tmp_path_factory.mktemp("dist")
    for name in EXAMPLES:
        example_dir = mkexample(tmp_path_factory, name)
        with jaraco.path.DirectoryStack().context(example_dir):
            cmd = bdist_wheel_cmd(
                bdist_dir=str(build_base / name), dist_dir=str(dist_dir)
            )
            cmd.run()

    return sorted(str(wheel) for wheel in dist_dir.glob("*.whl"))
@pytest.fixture
def dummy_dist(tmp_path_factory):
    # Project with the default/ignored license-file layout (see EXAMPLES).
    return mkexample(tmp_path_factory, "dummy-dist")
@pytest.fixture
def licenses_dist(tmp_path_factory):
    # Project using a recursive ``license_files = **/LICENSE`` glob.
    return mkexample(tmp_path_factory, "licenses-dist")
def test_no_scripts(wheel_paths):
    """Make sure entry point scripts are not generated."""
    wheel = next(p for p in wheel_paths if "complex_dist" in p)
    names = [entry.filename for entry in ZipFile(wheel).infolist()]
    assert all(".data/scripts/" not in name for name in names)
def test_unicode_record(wheel_paths):
    """RECORD must store non-ASCII file names UTF-8 encoded."""
    wheel = next(p for p in wheel_paths if "unicode_dist" in p)
    with ZipFile(wheel) as zf:
        record = zf.read("unicode_dist-0.1.dist-info/RECORD")

    assert "åäö_日本語.py".encode() in record
# PKG-INFO payload with non-ASCII header values and body, used to check that
# egg2dist round-trips UTF-8 metadata unchanged.
UTF8_PKG_INFO = """\
Metadata-Version: 2.1
Name: helloworld
Version: 42
Author-email: "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>
UTF-8 描述 説明
"""
def test_preserve_unicode_metadata(monkeypatch, tmp_path):
    """``egg2dist`` must copy PKG-INFO to METADATA without mangling UTF-8."""
    monkeypatch.chdir(tmp_path)
    egginfo = tmp_path / "dummy_dist.egg-info"
    distinfo = tmp_path / "dummy_dist.dist-info"

    egginfo.mkdir()
    (egginfo / "PKG-INFO").write_text(UTF8_PKG_INFO, encoding="utf-8")
    (egginfo / "dependency_links.txt").touch()

    class simpler_bdist_wheel(bdist_wheel):
        """Avoid messing with setuptools/distutils internals"""

        def __init__(self):
            pass

        @property
        def license_paths(self):
            # No licenses: keep the test focused on metadata conversion.
            return []

    cmd_obj = simpler_bdist_wheel()
    cmd_obj.egg2dist(egginfo, distinfo)

    metadata = (distinfo / "METADATA").read_text(encoding="utf-8")
    assert 'Author-email: "John X. Ãørçeč"' in metadata
    assert "Γαμα קּ 東 " in metadata
    assert "UTF-8 描述 説明" in metadata
def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
    """Without configuration, the conventional license file names are
    collected under ``.dist-info/licenses``."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    expected_licenses = {
        f"dummy_dist-1.0.dist-info/licenses/{fname}" for fname in DEFAULT_LICENSE_FILES
    }
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES | expected_licenses
def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
    """The singular ``license_file`` option is still honored."""
    setup_cfg = "[metadata]\nlicense_file=licenses_dir/DUMMYFILE"
    dummy_dist.joinpath("setup.cfg").write_text(setup_cfg, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    expected = DEFAULT_FILES | {
        "dummy_dist-1.0.dist-info/licenses/licenses_dir/DUMMYFILE"
    }
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        assert set(wf.namelist()) == expected
@pytest.mark.parametrize(
    ("config_file", "config"),
    [
        # setup.cfg: newline-continued and comma-separated glob lists
        ("setup.cfg", "[metadata]\nlicense_files=licenses_dir/*\n LICENSE"),
        ("setup.cfg", "[metadata]\nlicense_files=licenses_dir/*, LICENSE"),
        # setup.py: license_files passed directly to setup()
        (
            "setup.py",
            SETUPPY_EXAMPLE.replace(
                ")", " license_files=['licenses_dir/DUMMYFILE', 'LICENSE'])"
            ),
        ),
    ],
)
def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
    """Explicit ``license_files`` settings replace the default glob list and
    are recorded as ``License-File`` entries in METADATA."""
    dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        license_files = {
            "dummy_dist-1.0.dist-info/licenses/" + fname
            for fname in {"licenses_dir/DUMMYFILE", "LICENSE"}
        }
        assert set(wf.namelist()) == DEFAULT_FILES | license_files
        metadata = wf.read("dummy_dist-1.0.dist-info/METADATA").decode("utf8")
        assert "License-File: licenses_dir/DUMMYFILE" in metadata
        assert "License-File: LICENSE" in metadata
def test_licenses_preserve_folder_structure(licenses_dist, monkeypatch, tmp_path):
    """``license_files`` globs keep their directory layout both under
    ``.dist-info/licenses`` and in the METADATA ``License-File`` entries."""
    monkeypatch.chdir(licenses_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    # FIX: removed a leftover ``print(os.listdir("dist"))`` debugging statement.
    with ZipFile("dist/licenses_dist-1.0-py3-none-any.whl") as wf:
        default_files = {name.replace("dummy_", "licenses_") for name in DEFAULT_FILES}
        license_files = {
            "licenses_dist-1.0.dist-info/licenses/LICENSE",
            "licenses_dist-1.0.dist-info/licenses/src/vendor/LICENSE",
        }
        assert set(wf.namelist()) == default_files | license_files
        metadata = wf.read("licenses_dist-1.0.dist-info/METADATA").decode("utf8")
        assert "License-File: src/vendor/LICENSE" in metadata
        assert "License-File: LICENSE" in metadata
def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
    """An empty ``license_files`` option disables license collection."""
    dummy_dist.joinpath("setup.cfg").write_text(
        "[metadata]\nlicense_files=\n", encoding="utf-8"
    )
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path)).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        contents = set(wf.namelist())
    assert contents == DEFAULT_FILES
def test_build_number(dummy_dist, monkeypatch, tmp_path):
    """A build number becomes an extra tag component in the wheel name."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2").run()
    with ZipFile("dist/dummy_dist-1.0-2-py3-none-any.whl") as wf:
        contents = set(wf.namelist())
    assert "dummy_dist-1.0.dist-info/RECORD" in contents
    assert "dummy_dist-1.0.dist-info/METADATA" in contents
def test_universal_deprecated(dummy_dist, monkeypatch, tmp_path):
    """``--universal`` warns but still produces a py2.py3 wheel."""
    monkeypatch.chdir(dummy_dist)
    with pytest.warns(SetuptoolsDeprecationWarning, match=".*universal is deprecated"):
        bdist_wheel_cmd(bdist_dir=str(tmp_path), universal=True).run()

    # For now we still respect the option
    assert os.path.exists("dist/dummy_dist-1.0-py2.py3-none-any.whl")
EXTENSION_EXAMPLE = """\
#include <Python.h>
static PyMethodDef methods[] = {
{ NULL, NULL, 0, NULL }
};
static struct PyModuleDef module_def = {
PyModuleDef_HEAD_INIT,
"extension",
"Dummy extension module",
-1,
methods
};
PyMODINIT_FUNC PyInit_extension(void) {
return PyModule_Create(&module_def);
}
"""
EXTENSION_SETUPPY = """\
from __future__ import annotations
from setuptools import Extension, setup
setup(
name="extension.dist",
version="0.1",
description="A testing distribution \N{SNOWMAN}",
ext_modules=[Extension(name="extension", sources=["extension.c"])],
)
"""
@pytest.mark.filterwarnings(
    "once:Config variable '.*' is unset.*, Python ABI tag may be incorrect"
)
def test_limited_abi(monkeypatch, tmp_path, tmp_path_factory):
    """Test that building a binary wheel with the limited ABI works."""
    source_dir = tmp_path_factory.mktemp("extension_dist")
    (source_dir / "setup.py").write_text(EXTENSION_SETUPPY, encoding="utf-8")
    (source_dir / "extension.c").write_text(EXTENSION_EXAMPLE, encoding="utf-8")
    build_dir = tmp_path.joinpath("build")
    dist_dir = tmp_path.joinpath("dist")
    monkeypatch.chdir(source_dir)
    # NOTE: this invokes the real C compiler available on the test machine.
    bdist_wheel_cmd(bdist_dir=str(build_dir), dist_dir=str(dist_dir)).run()
def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
    """Building must not require write access to the source tree."""
    workdir = str(tmp_path.joinpath("dummy"))
    shutil.copytree(str(dummy_dist), workdir)
    monkeypatch.chdir(workdir)

    # Make the tree read-only
    for root, _dirs, files in os.walk(workdir):
        for filename in files:
            os.chmod(os.path.join(root, filename), stat.S_IREAD)

    bdist_wheel_cmd().run()
@pytest.mark.parametrize(
    ("option", "compress_type"),
    list(bdist_wheel.supported_compressions.items()),
    ids=list(bdist_wheel.supported_compressions),
)
def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
    """Every supported ``--compression`` value is applied to all entries."""
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), compression=option).run()
    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        filenames = set(wf.namelist())
        assert "dummy_dist-1.0.dist-info/RECORD" in filenames
        assert "dummy_dist-1.0.dist-info/METADATA" in filenames
        for zinfo in wf.filelist:
            assert zinfo.compress_type == compress_type
def test_wheelfile_line_endings(wheel_paths):
    """WHEEL metadata must use Unix line endings in every generated wheel."""
    for path in wheel_paths:
        with ZipFile(path) as wf:
            wheel_member = next(
                info for info in wf.filelist if info.filename.endswith("WHEEL")
            )
            assert b"\r" not in wf.read(wheel_member)
def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
    """SOURCE_DATE_EPOCH=0 predates the zip format's minimum timestamp;
    entries must be clamped to 1980-01-01 instead of failing."""
    monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
    monkeypatch.chdir(dummy_dist)
    bdist_wheel_cmd(bdist_dir=str(tmp_path), build_number="2a").run()
    min_date = (1980, 1, 1, 0, 0, 0)  # min epoch is used
    with ZipFile("dist/dummy_dist-1.0-2a-py3-none-any.whl") as wf:
        assert all(zinfo.date_time >= min_date for zinfo in wf.filelist)
def test_get_abi_tag_windows(monkeypatch):
    """ABI tag derivation on Windows-style SOABI values, including the 'd'
    suffix for debug builds and 't' for free-threaded SOABI strings."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313-win_amd64")
    assert get_abi_tag() == "cp313"
    # presence of sys.gettotalrefcount marks a debug build -> 'd' suffix
    monkeypatch.setattr(sys, "gettotalrefcount", lambda: 1, False)
    assert get_abi_tag() == "cp313d"
    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313t-win_amd64")
    assert get_abi_tag() == "cp313td"
    monkeypatch.delattr(sys, "gettotalrefcount")
    assert get_abi_tag() == "cp313t"
def test_get_abi_tag_pypy_old(monkeypatch):
    """Old-style PyPy SOABI values ('pypy36-pp73') map to 'pypy36_pp73'."""
    monkeypatch.setattr(sysconfig, "get_config_var", lambda _name: "pypy36-pp73")
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    assert get_abi_tag() == "pypy36_pp73"
def test_get_abi_tag_pypy_new(monkeypatch):
    """New-style PyPy SOABI values carry a platform suffix that is dropped."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
    monkeypatch.setattr(
        sysconfig, "get_config_var", lambda _name: "pypy37-pp73-darwin"
    )
    assert get_abi_tag() == "pypy37_pp73"
def test_get_abi_tag_graalpy(monkeypatch):
    """GraalPy SOABI values keep the 'native' component, dropping platform."""
    soabi = "graalpy231-310-native-x86_64-linux"
    monkeypatch.setattr(sysconfig, "get_config_var", lambda _name: soabi)
    monkeypatch.setattr(tags, "interpreter_name", lambda: "graalpy")
    assert get_abi_tag() == "graalpy231_310_native"
def test_get_abi_tag_fallback(monkeypatch):
    """Unknown interpreters fall back to a sanitized full SOABI string."""
    monkeypatch.setattr(tags, "interpreter_name", lambda: "unknown-python")
    monkeypatch.setattr(
        sysconfig, "get_config_var", lambda _name: "unknown-python-310"
    )
    assert get_abi_tag() == "unknown_python_310"
def test_platform_with_space(dummy_dist, monkeypatch):
    """Ensure building on platforms with a space in the name succeed."""
    monkeypatch.chdir(dummy_dist)
    # The space must not break tag computation or the output file name.
    bdist_wheel_cmd(plat_name="isilon onefs").run()
def test_data_dir_with_tag_build(monkeypatch, tmp_path):
    """
    Setuptools allow authors to set PEP 440's local version segments
    using ``egg_info.tag_build``. This should be reflected not only in the
    ``.whl`` file name, but also in the ``.dist-info`` and ``.data`` dirs.
    See pypa/setuptools#3997.
    """
    monkeypatch.chdir(tmp_path)
    files = {
        "setup.py": """
            from setuptools import setup
            setup(headers=["hello.h"])
            """,
        "setup.cfg": """
            [metadata]
            name = test
            version = 1.0
            [options.data_files]
            hello/world = file.txt
            [egg_info]
            tag_build = +what
            tag_date = 0
            """,
        "file.txt": "",
        "hello.h": "",
    }
    for file, content in files.items():
        with open(file, "w", encoding="utf-8") as fh:
            fh.write(cleandoc(content))

    bdist_wheel_cmd().run()

    # Ensure .whl, .dist-info and .data contain the local segment
    wheel_path = "dist/test-1.0+what-py3-none-any.whl"
    assert os.path.exists(wheel_path)
    entries = set(ZipFile(wheel_path).namelist())
    for expected in (
        "test-1.0+what.data/headers/hello.h",
        "test-1.0+what.data/data/hello/world/file.txt",
        "test-1.0+what.dist-info/METADATA",
        "test-1.0+what.dist-info/WHEEL",
    ):
        assert expected in entries
    # ...and that no entry was produced without the local segment.
    for not_expected in (
        "test.data/headers/hello.h",
        "test-1.0.data/data/hello/world/file.txt",
        "test.dist-info/METADATA",
        "test-1.0.dist-info/WHEEL",
    ):
        assert not_expected not in entries
@pytest.mark.parametrize(
    ("reported", "expected"),
    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
)
@pytest.mark.skipif(
    platform.system() != "Linux", reason="Only makes sense to test on Linux"
)
def test_platform_linux32(reported, expected, monkeypatch):
    """A 32-bit interpreter on a 64-bit kernel must report the matching
    32-bit platform tag."""
    # struct.calcsize("P") == 4 makes the interpreter look 32-bit
    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
    dist = setuptools.Distribution()
    cmd = bdist_wheel(dist)
    cmd.plat_name = reported
    cmd.root_is_pure = False
    _, _, actual = cmd.get_tag()
    assert actual == expected
def test_no_ctypes(monkeypatch) -> None:
    """The bdist_wheel command must be importable when ctypes is unavailable."""

    def _fake_import(name: str, *args, **kwargs):
        # Simulate an interpreter built without ctypes.
        if name == "ctypes":
            raise ModuleNotFoundError(f"No module named {name}")

        return importlib.__import__(name, *args, **kwargs)

    with suppress(KeyError):
        monkeypatch.delitem(sys.modules, "wheel.macosx_libfile")

    # Install an importer shim that refuses to load ctypes
    monkeypatch.setattr(builtins, "__import__", _fake_import)

    # Sanity check: the shim really blocks ctypes-dependent modules.
    with pytest.raises(ModuleNotFoundError, match="No module named ctypes"):
        import wheel.macosx_libfile  # noqa: F401

    # Unload and reimport the bdist_wheel command module to make sure it won't try to
    # import ctypes
    monkeypatch.delitem(sys.modules, "setuptools.command.bdist_wheel")

    import setuptools.command.bdist_wheel  # noqa: F401
def test_dist_info_provided(dummy_dist, monkeypatch, tmp_path):
    """A pre-built ``.dist-info`` dir passed via ``dist_info_dir`` is reused
    verbatim, including extra files, with no egg-info leakage."""
    monkeypatch.chdir(dummy_dist)
    distinfo = tmp_path / "dummy_dist.dist-info"

    distinfo.mkdir()
    (distinfo / "METADATA").write_text("name: helloworld", encoding="utf-8")

    # We don't control the metadata. According to PEP-517, "The hook MAY also
    # create other files inside this directory, and a build frontend MUST
    # preserve".
    (distinfo / "FOO").write_text("bar", encoding="utf-8")
    bdist_wheel_cmd(bdist_dir=str(tmp_path), dist_info_dir=str(distinfo)).run()
    expected = {
        "dummy_dist-1.0.dist-info/FOO",
        "dummy_dist-1.0.dist-info/RECORD",
    }

    with ZipFile("dist/dummy_dist-1.0-py3-none-any.whl") as wf:
        files_found = set(wf.namelist())
    # Check that all expected files are there.
    assert expected - files_found == set()
    # Make sure there is no accidental egg-info bleeding into the wheel.
    assert not [path for path in files_found if 'egg-info' in str(path)]
def test_allow_grace_period_parent_directory_license(monkeypatch, tmp_path):
    """License files referenced via ``..`` still work during the deprecation
    grace period, but emit a SetuptoolsDeprecationWarning."""
    # Motivation: https://github.com/pypa/setuptools/issues/4892
    # TODO: Remove this test after deprecation period is over
    files = {
        "LICENSE.txt": "parent license",  # <---- the license files are outside
        "NOTICE.txt": "parent notice",
        "python": {
            "pyproject.toml": cleandoc(
                """
                [project]
                name = "test-proj"
                dynamic = ["version"]  # <---- testing dynamic will not break
                [tool.setuptools.dynamic]
                version.file = "VERSION"
                """
            ),
            "setup.cfg": cleandoc(
                """
                [metadata]
                license_files =
                    ../LICENSE.txt
                    ../NOTICE.txt
                """
            ),
            "VERSION": "42",
        },
    }
    jaraco.path.build(files, prefix=str(tmp_path))
    monkeypatch.chdir(tmp_path / "python")
    msg = "Pattern '../.*.txt' cannot contain '..'"
    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
        bdist_wheel_cmd().run()
    with ZipFile("dist/test_proj-42-py3-none-any.whl") as wf:
        files_found = set(wf.namelist())
        # The parent-dir licenses are flattened into dist-info/licenses.
        expected_files = {
            "test_proj-42.dist-info/licenses/LICENSE.txt",
            "test_proj-42.dist-info/licenses/NOTICE.txt",
        }
        assert expected_files <= files_found
        metadata = wf.read("test_proj-42.dist-info/METADATA").decode("utf8")
        assert "License-File: LICENSE.txt" in metadata
        assert "License-File: NOTICE.txt" in metadata
View File

@@ -0,0 +1,33 @@
from setuptools import Command
from setuptools.command.build import build
from setuptools.dist import Distribution
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
    """
    Check that the setuptools Distribution uses the
    setuptools specific build object.
    """
    attrs = dict(
        script_name='setup.py',
        script_args=['build'],
        packages=[],
        package_data={'': ['path/*']},
    )
    dist = Distribution(attrs)
    assert isinstance(dist.get_command_obj("build"), build)
class Subcommand(Command):
    """Dummy command to be used in tests"""

    def initialize_options(self):
        """No options to set up."""

    def finalize_options(self):
        """No options to validate."""

    def run(self):
        raise NotImplementedError("just to check if the command runs")

View File

@@ -0,0 +1,84 @@
import random
from unittest import mock
import pytest
from setuptools.command.build_clib import build_clib
from setuptools.dist import Distribution
from distutils.errors import DistutilsSetupError
class TestBuildCLib:
    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries(self, mock_newer):
        """Exercise ``build_libraries`` input validation and the ``obj_deps``
        dependency mechanism, with the compiler and staleness check mocked."""
        dist = Distribution()
        cmd = build_clib(dist)

        # this will be a long section, just making sure all
        # exceptions are properly raised
        libs = [('example', {'sources': 'broken.c'})]  # sources must be a list
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = 'some_string'  # obj_deps must be a dictionary
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = {'': ''}  # global dependencies must be a list
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        obj_deps = {'source.c': ''}  # per-source dependencies must be a list
        libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(libs)

        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        mock_newer.return_value = ([], [])  # nothing stale -> no compilation

        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        # global deps and per-source deps are merged into one group
        assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
        assert not cmd.compiler.compile.called
        assert cmd.compiler.create_static_lib.call_count == 1

        # reset the call numbers so we can test again
        cmd.compiler.reset_mock()

        mock_newer.return_value = ''  # anything as long as it's not ([],[])

        cmd.build_libraries(libs)
        assert cmd.compiler.compile.call_count == 1
        assert cmd.compiler.create_static_lib.call_count == 1

    @mock.patch('setuptools.command.build_clib.newer_pairwise_group')
    def test_build_libraries_reproducible(self, mock_newer):
        """The source/dependency grouping must not depend on input ordering."""
        dist = Distribution()
        cmd = build_clib(dist)

        # with that out of the way, let's see if the crude dependency
        # system works
        cmd.compiler = mock.MagicMock(spec=cmd.compiler)
        mock_newer.return_value = ([], [])

        original_sources = ['a-example.c', 'example.c']
        sources = original_sources

        obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        computed_call_args = mock_newer.call_args[0]

        # shuffle the sources until the order actually differs, then rebuild
        while sources == original_sources:
            sources = random.sample(original_sources, len(original_sources))
        libs = [('example', {'sources': sources, 'obj_deps': obj_deps})]

        cmd.build_libraries(libs)
        assert computed_call_args == mock_newer.call_args[0]

View File

@@ -0,0 +1,293 @@
from __future__ import annotations
import os
import sys
from importlib.util import cache_from_source as _compiled_file_name
import pytest
from jaraco import path
from setuptools.command.build_ext import build_ext, get_abi3_suffix
from setuptools.dist import Distribution
from setuptools.errors import CompileError
from setuptools.extension import Extension
from . import environment
from .textwrap import DALS
import distutils.command.build_ext as orig
from distutils.sysconfig import get_config_var
# True when running under PyPy, which registers '__pypy__' as a builtin module.
IS_PYPY = '__pypy__' in sys.builtin_module_names
class TestBuildExt:
    """Unit tests for setuptools' ``build_ext`` command (filename computation,
    ABI3 handling, suffix overrides and output/mapping bookkeeping)."""

    def test_get_ext_filename(self):
        """
        Setuptools needs to give back the same
        result as distutils, even if the fullname
        is not in ext_map.
        """
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.ext_map['foo/bar'] = ''
        res = cmd.get_ext_filename('foo')
        wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
        assert res == wanted

    def test_abi3_filename(self):
        """
        Filename needs to be loadable by several versions
        of Python 3 if 'is_abi3' is truthy on Extension()
        """
        print(get_abi3_suffix())
        extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True)
        dist = Distribution(dict(ext_modules=[extension]))
        cmd = build_ext(dist)
        cmd.finalize_options()
        assert 'spam.eggs' in cmd.ext_map
        res = cmd.get_ext_filename('spam.eggs')
        # Three possible outcomes: no abi3 support at all, Windows naming,
        # or a POSIX-style "abi3" tag embedded in the suffix.
        if not get_abi3_suffix():
            assert res.endswith(get_config_var('EXT_SUFFIX'))
        elif sys.platform == 'win32':
            assert res.endswith('eggs.pyd')
        else:
            assert 'abi3' in res

    def test_ext_suffix_override(self):
        """
        SETUPTOOLS_EXT_SUFFIX variable always overrides
        default extension options.
        """
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.ext_map['for_abi3'] = ext = Extension(
            'for_abi3',
            ['s.c'],
            # Override shouldn't affect abi3 modules
            py_limited_api=True,
        )
        # Mock value needed to pass tests
        ext._links_to_dynamic = False
        if not IS_PYPY:
            # Capture the expected (abi3) filename before the override is set.
            expect = cmd.get_ext_filename('for_abi3')
        else:
            # PyPy builds do not use ABI3 tag, so they will
            # also get the overridden suffix.
            expect = 'for_abi3.test-suffix'
        try:
            os.environ['SETUPTOOLS_EXT_SUFFIX'] = '.test-suffix'
            res = cmd.get_ext_filename('normal')
            assert 'normal.test-suffix' == res
            res = cmd.get_ext_filename('for_abi3')
            assert expect == res
        finally:
            # Always clean up the environment so other tests are unaffected.
            del os.environ['SETUPTOOLS_EXT_SUFFIX']

    def dist_with_example(self):
        # Helper: materialize a small project with three extensions
        # (one under package_dir-mapped "src", one nested, one top-level).
        files = {
            "src": {"mypkg": {"subpkg": {"ext2.c": ""}}},
            "c-extensions": {"ext1": {"main.c": ""}},
        }
        ext1 = Extension("mypkg.ext1", ["c-extensions/ext1/main.c"])
        ext2 = Extension("mypkg.subpkg.ext2", ["src/mypkg/subpkg/ext2.c"])
        ext3 = Extension("ext3", ["c-extension/ext3.c"])
        path.build(files)
        return Distribution({
            "script_name": "%test%",
            "ext_modules": [ext1, ext2, ext3],
            "package_dir": {"": "src"},
        })

    def test_get_outputs(self, tmpdir_cwd, monkeypatch):
        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', False)
        dist = self.dist_with_example()
        # Regular build: get_outputs not empty, but get_output_mappings is empty
        build_ext = dist.get_command_obj("build_ext")
        build_ext.editable_mode = False
        build_ext.ensure_finalized()
        build_lib = build_ext.build_lib.replace(os.sep, "/")
        outputs = [x.replace(os.sep, "/") for x in build_ext.get_outputs()]
        assert outputs == [
            f"{build_lib}/ext3.mp3",
            f"{build_lib}/mypkg/ext1.mp3",
            f"{build_lib}/mypkg/subpkg/ext2.mp3",
        ]
        assert build_ext.get_output_mapping() == {}
        # Editable build: get_output_mappings should contain everything in get_outputs
        dist.reinitialize_command("build_ext")
        build_ext.editable_mode = True
        build_ext.ensure_finalized()
        mapping = {
            k.replace(os.sep, "/"): v.replace(os.sep, "/")
            for k, v in build_ext.get_output_mapping().items()
        }
        assert mapping == {
            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
        }

    def test_get_output_mapping_with_stub(self, tmpdir_cwd, monkeypatch):
        monkeypatch.setenv('SETUPTOOLS_EXT_SUFFIX', '.mp3')  # make test OS-independent
        monkeypatch.setattr('setuptools.command.build_ext.use_stubs', True)
        dist = self.dist_with_example()
        # Editable build should create compiled stubs (.pyc files only, no .py)
        build_ext = dist.get_command_obj("build_ext")
        build_ext.editable_mode = True
        build_ext.ensure_finalized()
        for ext in build_ext.extensions:
            monkeypatch.setattr(ext, "_needs_stub", True)
        build_lib = build_ext.build_lib.replace(os.sep, "/")
        mapping = {
            k.replace(os.sep, "/"): v.replace(os.sep, "/")
            for k, v in build_ext.get_output_mapping().items()
        }

        def C(file):
            """Make it possible to do comparisons and tests in a OS-independent way"""
            return _compiled_file_name(file).replace(os.sep, "/")

        assert mapping == {
            C(f"{build_lib}/ext3.py"): C("src/ext3.py"),
            f"{build_lib}/ext3.mp3": "src/ext3.mp3",
            C(f"{build_lib}/mypkg/ext1.py"): C("src/mypkg/ext1.py"),
            f"{build_lib}/mypkg/ext1.mp3": "src/mypkg/ext1.mp3",
            C(f"{build_lib}/mypkg/subpkg/ext2.py"): C("src/mypkg/subpkg/ext2.py"),
            f"{build_lib}/mypkg/subpkg/ext2.mp3": "src/mypkg/subpkg/ext2.mp3",
        }
        # Ensure only the compiled stubs are present not the raw .py stub
        assert f"{build_lib}/mypkg/ext1.py" not in mapping
        assert f"{build_lib}/mypkg/subpkg/ext2.py" not in mapping
        # Visualize what the cached stub files look like
        example_stub = C(f"{build_lib}/mypkg/ext1.py")
        assert example_stub in mapping
        assert example_stub.startswith(f"{build_lib}/mypkg/__pycache__/ext1")
        assert example_stub.endswith(".pyc")
class TestBuildExtInplace:
    """In-place builds: optional extensions that fail to compile must be
    reported but tolerated; non-optional failures must raise."""

    def get_build_ext_cmd(self, optional: bool, **opts) -> build_ext:
        # Deliberately broken C source: the bogus include guarantees a
        # compile failure without depending on the host toolchain's quirks.
        files: dict[str, str | dict[str, dict[str, str]]] = {
            "eggs.c": "#include missingheader.h\n",
            ".build": {"lib": {}, "tmp": {}},
        }
        path.build(files)
        extension = Extension('spam.eggs', ['eggs.c'], optional=optional)
        dist = Distribution(dict(ext_modules=[extension]))
        dist.script_name = 'setup.py'
        cmd = build_ext(dist)
        vars(cmd).update(build_lib=".build/lib", build_temp=".build/tmp", **opts)
        cmd.ensure_finalized()
        return cmd

    def get_log_messages(self, caplog, capsys):
        """
        Historically, distutils "logged" by printing to sys.std*.
        Later versions adopted the logging framework. Grab
        messages regardless of how they were captured.
        """
        std = capsys.readouterr()
        return std.out.splitlines() + std.err.splitlines() + caplog.messages

    def test_optional(self, tmpdir_cwd, caplog, capsys):
        """
        If optional extensions fail to build, setuptools should show the error
        in the logs but not fail to build
        """
        cmd = self.get_build_ext_cmd(optional=True, inplace=True)
        cmd.run()
        # FIX: the original generator yielded the literal string itself for
        # every message, so ``any(...)`` was trivially true whenever at least
        # one message existed.  Test membership in each message instead.
        assert any(
            'build_ext: building extension "spam.eggs" failed' in msg
            for msg in self.get_log_messages(caplog, capsys)
        )
        # No compile error exception should be raised

    def test_non_optional(self, tmpdir_cwd):
        # Non-optional extensions should raise an exception
        cmd = self.get_build_ext_cmd(optional=False, inplace=True)
        with pytest.raises(CompileError):
            cmd.run()
def test_build_ext_config_handling(tmpdir_cwd):
    """End-to-end check that ``setup.cfg`` [build] options (here a custom
    ``build_base``) are honored when compiling a real extension module via
    ``setup.py build`` in a subprocess."""
    files = {
        'setup.py': DALS(
            """
            from setuptools import Extension, setup
            setup(
                name='foo',
                version='0.0.0',
                ext_modules=[Extension('foo', ['foo.c'])],
            )
            """
        ),
        # Minimal C extension that compiles under both Python 2 and 3 APIs.
        'foo.c': DALS(
            """
            #include "Python.h"
            #if PY_MAJOR_VERSION >= 3
            static struct PyModuleDef moduledef = {
                PyModuleDef_HEAD_INIT,
                "foo",
                NULL,
                0,
                NULL,
                NULL,
                NULL,
                NULL,
                NULL
            };
            #define INITERROR return NULL
            PyMODINIT_FUNC PyInit_foo(void)
            #else
            #define INITERROR return
            void initfoo(void)
            #endif
            {
            #if PY_MAJOR_VERSION >= 3
                PyObject *module = PyModule_Create(&moduledef);
            #else
                PyObject *module = Py_InitModule("extension", NULL);
            #endif
                if (module == NULL)
                    INITERROR;
            #if PY_MAJOR_VERSION >= 3
                return module;
            #endif
            }
            """
        ),
        'setup.cfg': DALS(
            """
            [build]
            build_base = foo_build
            """
        ),
    }
    path.build(files)
    # Run "setup.py build" out-of-process and capture stdout/stderr.
    code, (stdout, stderr) = environment.run_setup_py(
        cmd=['build'],
        data_stream=(0, 2),
    )
    assert code == 0, f'\nSTDOUT:\n{stdout}\nSTDERR:\n{stderr}'

View File

@@ -0,0 +1,959 @@
import contextlib
import importlib
import os
import re
import shutil
import signal
import sys
import tarfile
import warnings
from concurrent import futures
from pathlib import Path
from typing import Any, Callable
from zipfile import ZipFile
import pytest
from jaraco import path
from packaging.requirements import Requirement
from setuptools.warnings import SetuptoolsDeprecationWarning
from .textwrap import DALS
# Minimal setup script for fixtures that only need *a* setup.py to exist.
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
# Per-hook-call timeout for the subprocess backend, overridable via env var.
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180"))  # in seconds
# True when running under PyPy ('__pypy__' is a PyPy builtin module).
IS_PYPY = '__pypy__' in sys.builtin_module_names

# Skip this whole module on the known-flaky PyPy/Windows combination.
pytestmark = pytest.mark.skipif(
    sys.platform == "win32" and IS_PYPY,
    reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
    "is flaky and problematic",
)
class BuildBackendBase:
    """Common state shared by the frontend proxy and the subprocess caller:
    working directory, extra environment, and the backend's import spec."""

    def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
        self.backend_name = backend_name
        self.cwd = cwd
        self.env = {} if not env else env
class BuildBackend(BuildBackendBase):
    """PEP 517 Build Backend"""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Single-worker pool: every hook runs in a subprocess so that
        # os.chdir()/os.environ mutations cannot leak into the test runner.
        self.pool = futures.ProcessPoolExecutor(max_workers=1)

    def __getattr__(self, name: str) -> Callable[..., Any]:
        """Handles arbitrary function invocations on the build backend."""

        def method(*args, **kw):
            root = os.path.abspath(self.cwd)
            caller = BuildBackendCaller(root, self.env, self.backend_name)
            pid = None
            try:
                # First grab the worker's pid so a hung hook can be killed.
                pid = self.pool.submit(os.getpid).result(TIMEOUT)
                return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
            except futures.TimeoutError:
                self.pool.shutdown(wait=False)  # doesn't stop already running processes
                self._kill(pid)
                pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
            except (futures.process.BrokenProcessPool, MemoryError, OSError):
                if IS_PYPY:
                    pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
                raise

        return method

    def _kill(self, pid):
        # pid may still be None if even the os.getpid() probe timed out.
        if pid is None:
            return
        with contextlib.suppress(ProcessLookupError, OSError):
            # Windows ("nt") has no SIGKILL; fall back to SIGTERM there.
            os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
class BuildBackendCaller(BuildBackendBase):
    """Runs inside the worker process: resolves the backend spec and
    dispatches a single hook invocation."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Split an optional "module:object" spec into its two halves.
        module_part, _, object_part = self.backend_name.partition(':')
        self.backend_name = module_part
        self.backend_obj = object_part

    def __call__(self, name, *args, **kw):
        """Handles arbitrary function invocations on the build backend."""
        os.chdir(self.cwd)
        os.environ.update(self.env)
        mod = importlib.import_module(self.backend_name)
        backend = getattr(mod, self.backend_obj) if self.backend_obj else mod
        return getattr(backend, name)(*args, **kw)
defns = [
{ # simple setup.py script
'setup.py': DALS(
"""
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
},
{ # setup.py that relies on __name__
'setup.py': DALS(
"""
assert __name__ == '__main__'
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
},
{ # setup.py script that runs arbitrary code
'setup.py': DALS(
"""
variable = True
def function():
return variable
assert variable
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['hello'],
setup_requires=['six'],
)
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
},
{ # setup.py script that constructs temp files to be included in the distribution
'setup.py': DALS(
"""
# Some packages construct files on the fly, include them in the package,
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
# to obtain a distribution object first, and then run the distutils
# commands later, because these files will be removed in the meantime.
with open('world.py', 'w', encoding="utf-8") as f:
f.write('x = 42')
try:
__import__('setuptools').setup(
name='foo',
version='0.0.0',
py_modules=['world'],
setup_requires=['six'],
)
finally:
# Some packages will clean temporary files
__import__('os').unlink('world.py')
"""
),
},
{ # setup.cfg only
'setup.cfg': DALS(
"""
[metadata]
name = foo
version = 0.0.0
[options]
py_modules=hello
setup_requires=six
"""
),
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
},
{ # setup.cfg and setup.py
'setup.cfg': DALS(
"""
[metadata]
name = foo
version = 0.0.0
[options]
py_modules=hello
setup_requires=six
"""
),
'setup.py': "__import__('setuptools').setup()",
'hello.py': DALS(
"""
def run():
print('hello')
"""
),
},
]
class TestBuildMetaBackend:
    """Exercises the PEP 517 hooks exposed by ``setuptools.build_meta``
    through the out-of-process :class:`BuildBackend` proxy."""

    backend_name = 'setuptools.build_meta'

    def get_build_backend(self):
        return BuildBackend(backend_name=self.backend_name)

    @pytest.fixture(params=defns)
    def build_backend(self, tmpdir, request):
        # Materialize one of the example project layouts and chdir into it.
        path.build(request.param, prefix=str(tmpdir))
        with tmpdir.as_cwd():
            yield self.get_build_backend()

    def test_get_requires_for_build_wheel(self, build_backend):
        actual = build_backend.get_requires_for_build_wheel()
        expected = ['six']
        assert sorted(actual) == sorted(expected)

    def test_get_requires_for_build_sdist(self, build_backend):
        actual = build_backend.get_requires_for_build_sdist()
        expected = ['six']
        assert sorted(actual) == sorted(expected)

    def test_build_wheel(self, build_backend):
        dist_dir = os.path.abspath('pip-wheel')
        os.makedirs(dist_dir)
        wheel_name = build_backend.build_wheel(dist_dir)
        wheel_file = os.path.join(dist_dir, wheel_name)
        assert os.path.isfile(wheel_file)
        # Temporary files should be removed
        assert not os.path.isfile('world.py')
        with ZipFile(wheel_file) as zipfile:
            wheel_contents = set(zipfile.namelist())
        # Each one of the examples have a single module
        # that should be included in the distribution
        python_scripts = (f for f in wheel_contents if f.endswith('.py'))
        modules = [f for f in python_scripts if not f.endswith('setup.py')]
        assert len(modules) == 1

    @pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
    def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
        # Building a sdist/wheel should still succeed if there's
        # already a sdist/wheel in the destination directory.
        files = {
            'setup.py': "from setuptools import setup\nsetup()",
            'VERSION': "0.0.1",
            'setup.cfg': DALS(
                """
                [metadata]
                name = foo
                version = file: VERSION
                """
            ),
            'pyproject.toml': DALS(
                """
                [build-system]
                requires = ["setuptools", "wheel"]
                build-backend = "setuptools.build_meta"
                """
            ),
        }
        path.build(files)
        dist_dir = os.path.abspath('preexisting-' + build_type)
        build_backend = self.get_build_backend()
        build_method = getattr(build_backend, 'build_' + build_type)
        # Build a first sdist/wheel.
        # Note: this also check the destination directory is
        # successfully created if it does not exist already.
        first_result = build_method(dist_dir)
        # Change version.
        with open("VERSION", "wt", encoding="utf-8") as version_file:
            version_file.write("0.0.2")
        # Build a *second* sdist/wheel.
        second_result = build_method(dist_dir)
        assert os.path.isfile(os.path.join(dist_dir, first_result))
        assert first_result != second_result
        # And if rebuilding the exact same sdist/wheel?
        open(os.path.join(dist_dir, second_result), 'wb').close()
        third_result = build_method(dist_dir)
        assert third_result == second_result
        assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0

    @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
    def test_build_with_pyproject_config(self, tmpdir, setup_script):
        files = {
            'pyproject.toml': DALS(
                """
                [build-system]
                requires = ["setuptools", "wheel"]
                build-backend = "setuptools.build_meta"

                [project]
                name = "foo"
                license = {text = "MIT"}
                description = "This is a Python package"
                dynamic = ["version", "readme"]
                classifiers = [
                    "Development Status :: 5 - Production/Stable",
                    "Intended Audience :: Developers"
                ]
                urls = {Homepage = "http://github.com"}
                dependencies = [
                    "appdirs",
                ]

                [project.optional-dependencies]
                all = [
                    "tomli>=1",
                    "pyscaffold>=4,<5",
                    'importlib; python_version == "2.6"',
                ]

                [project.scripts]
                foo = "foo.cli:main"

                [tool.setuptools]
                zip-safe = false
                package-dir = {"" = "src"}
                packages = {find = {where = ["src"]}}
                license-files = ["LICENSE*"]

                [tool.setuptools.dynamic]
                version = {attr = "foo.__version__"}
                readme = {file = "README.rst"}

                [tool.distutils.sdist]
                formats = "gztar"
                """
            ),
            "MANIFEST.in": DALS(
                """
                global-include *.py *.txt
                global-exclude *.py[cod]
                """
            ),
            "README.rst": "This is a ``README``",
            "LICENSE.txt": "---- placeholder MIT license ----",
            "src": {
                "foo": {
                    "__init__.py": "__version__ = '0.1'",
                    "__init__.pyi": "__version__: str",
                    "cli.py": "def main(): print('hello world')",
                    "data.txt": "def main(): print('hello world')",
                    "py.typed": "",
                }
            },
        }
        if setup_script:
            files["setup.py"] = setup_script
        build_backend = self.get_build_backend()
        with tmpdir.as_cwd():
            path.build(files)
            msgs = [
                "'tool.setuptools.license-files' is deprecated in favor of 'project.license-files'",
                "`project.license` as a TOML table is deprecated",
            ]
            # These deprecations are intentional in the fixture; silence them.
            with warnings.catch_warnings():
                for msg in msgs:
                    warnings.filterwarnings("ignore", msg, SetuptoolsDeprecationWarning)
                sdist_path = build_backend.build_sdist("temp")
                wheel_file = build_backend.build_wheel("temp")
        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
            sdist_contents = set(tar.getnames())
        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
            wheel_contents = set(zipfile.namelist())
            metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
            license = str(
                zipfile.read("foo-0.1.dist-info/licenses/LICENSE.txt"), "utf-8"
            )
            epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
        # setup.py may or may not be present depending on the parametrization.
        assert sdist_contents - {"foo-0.1/setup.py"} == {
            'foo-0.1',
            'foo-0.1/LICENSE.txt',
            'foo-0.1/MANIFEST.in',
            'foo-0.1/PKG-INFO',
            'foo-0.1/README.rst',
            'foo-0.1/pyproject.toml',
            'foo-0.1/setup.cfg',
            'foo-0.1/src',
            'foo-0.1/src/foo',
            'foo-0.1/src/foo/__init__.py',
            'foo-0.1/src/foo/__init__.pyi',
            'foo-0.1/src/foo/cli.py',
            'foo-0.1/src/foo/data.txt',
            'foo-0.1/src/foo/py.typed',
            'foo-0.1/src/foo.egg-info',
            'foo-0.1/src/foo.egg-info/PKG-INFO',
            'foo-0.1/src/foo.egg-info/SOURCES.txt',
            'foo-0.1/src/foo.egg-info/dependency_links.txt',
            'foo-0.1/src/foo.egg-info/entry_points.txt',
            'foo-0.1/src/foo.egg-info/requires.txt',
            'foo-0.1/src/foo.egg-info/top_level.txt',
            'foo-0.1/src/foo.egg-info/not-zip-safe',
        }
        assert wheel_contents == {
            "foo/__init__.py",
            "foo/__init__.pyi",  # include type information by default
            "foo/cli.py",
            "foo/data.txt",  # include_package_data defaults to True
            "foo/py.typed",  # include type information by default
            "foo-0.1.dist-info/licenses/LICENSE.txt",
            "foo-0.1.dist-info/METADATA",
            "foo-0.1.dist-info/WHEEL",
            "foo-0.1.dist-info/entry_points.txt",
            "foo-0.1.dist-info/top_level.txt",
            "foo-0.1.dist-info/RECORD",
        }
        assert license == "---- placeholder MIT license ----"
        for line in (
            "Summary: This is a Python package",
            "License: MIT",
            "License-File: LICENSE.txt",
            "Classifier: Intended Audience :: Developers",
            "Requires-Dist: appdirs",
            "Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
            "Requires-Dist: "
            + str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
        ):
            assert line in metadata, (line, metadata)
        assert metadata.strip().endswith("This is a ``README``")
        assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"

    def test_static_metadata_in_pyproject_config(self, tmpdir):
        # Make sure static metadata in pyproject.toml is not overwritten by setup.py
        # as required by PEP 621
        files = {
            'pyproject.toml': DALS(
                """
                [build-system]
                requires = ["setuptools", "wheel"]
                build-backend = "setuptools.build_meta"

                [project]
                name = "foo"
                description = "This is a Python package"
                version = "42"
                dependencies = ["six"]
                """
            ),
            'hello.py': DALS(
                """
                def run():
                    print('hello')
                """
            ),
            'setup.py': DALS(
                """
                __import__('setuptools').setup(
                    name='bar',
                    version='13',
                )
                """
            ),
        }
        build_backend = self.get_build_backend()
        with tmpdir.as_cwd():
            path.build(files)
            sdist_path = build_backend.build_sdist("temp")
            wheel_file = build_backend.build_wheel("temp")
        assert (tmpdir / "temp/foo-42.tar.gz").exists()
        assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
        assert not (tmpdir / "temp/bar-13.tar.gz").exists()
        assert not (tmpdir / "temp/bar-42.tar.gz").exists()
        assert not (tmpdir / "temp/foo-13.tar.gz").exists()
        assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
        assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
        assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
            pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
            members = tar.getnames()
            assert "bar-13/PKG-INFO" not in members
        with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
            metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
            members = zipfile.namelist()
            assert "bar-13.dist-info/METADATA" not in members
        for file in pkg_info, metadata:
            for line in ("Name: foo", "Version: 42"):
                assert line in file
            for line in ("Name: bar", "Version: 13"):
                assert line not in file

    def test_build_sdist(self, build_backend):
        dist_dir = os.path.abspath('pip-sdist')
        os.makedirs(dist_dir)
        sdist_name = build_backend.build_sdist(dist_dir)
        assert os.path.isfile(os.path.join(dist_dir, sdist_name))

    def test_prepare_metadata_for_build_wheel(self, build_backend):
        dist_dir = os.path.abspath('pip-dist-info')
        os.makedirs(dist_dir)
        dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
        assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))

    def test_prepare_metadata_inplace(self, build_backend):
        """
        Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
        See issue #3523.
        """
        for pre_existing in [
            ".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
            ".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
            ".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
            ".venv/python3.10/site-packages/click-8.1.3.dist-info",
            "venv/python3.10/site-packages/distlib-0.3.5.dist-info",
            "env/python3.10/site-packages/docutils-0.19.dist-info",
        ]:
            os.makedirs(pre_existing, exist_ok=True)
        dist_info = build_backend.prepare_metadata_for_build_wheel(".")
        assert os.path.isfile(os.path.join(dist_info, 'METADATA'))

    def test_build_sdist_explicit_dist(self, build_backend):
        # explicitly specifying the dist folder should work
        # the folder sdist_directory and the ``--dist-dir`` can be the same
        dist_dir = os.path.abspath('dist')
        sdist_name = build_backend.build_sdist(dist_dir)
        assert os.path.isfile(os.path.join(dist_dir, sdist_name))

    def test_build_sdist_version_change(self, build_backend):
        sdist_into_directory = os.path.abspath("out_sdist")
        os.makedirs(sdist_into_directory)
        sdist_name = build_backend.build_sdist(sdist_into_directory)
        assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
        # if the setup.py changes subsequent call of the build meta
        # should still succeed, given the
        # sdist_directory the frontend specifies is empty
        setup_loc = os.path.abspath("setup.py")
        if not os.path.exists(setup_loc):
            setup_loc = os.path.abspath("setup.cfg")
        with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
            content = file_handler.read()
        with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
            file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
        shutil.rmtree(sdist_into_directory)
        os.makedirs(sdist_into_directory)
        sdist_name = build_backend.build_sdist("out_sdist")
        assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))

    def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
        files = {
            'setup.py': DALS(
                """
                __import__('setuptools').setup(
                    name='foo',
                    version='0.0.0',
                    py_modules=['hello']
                )"""
            ),
            'hello.py': '',
            'pyproject.toml': DALS(
                """
                [build-system]
                requires = ["setuptools", "wheel"]
                build-backend = "setuptools.build_meta"
                """
            ),
        }
        path.build(files)
        build_backend = self.get_build_backend()
        targz_path = build_backend.build_sdist("temp")
        with tarfile.open(os.path.join("temp", targz_path)) as tar:
            assert any('pyproject.toml' in name for name in tar.getnames())

    def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
        # If build_sdist is called from a script other than setup.py,
        # ensure setup.py is included
        path.build(defns[0])
        build_backend = self.get_build_backend()
        targz_path = build_backend.build_sdist("temp")
        with tarfile.open(os.path.join("temp", targz_path)) as tar:
            assert any('setup.py' in name for name in tar.getnames())

    def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
        # Ensure that MANIFEST.in can exclude setup.py
        files = {
            'setup.py': DALS(
                """
                __import__('setuptools').setup(
                    name='foo',
                    version='0.0.0',
                    py_modules=['hello']
                )"""
            ),
            'hello.py': '',
            'MANIFEST.in': DALS(
                """
                exclude setup.py
                """
            ),
        }
        path.build(files)
        build_backend = self.get_build_backend()
        targz_path = build_backend.build_sdist("temp")
        with tarfile.open(os.path.join("temp", targz_path)) as tar:
            assert not any('setup.py' in name for name in tar.getnames())

    def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
        files = {
            'setup.py': DALS(
                """
                __import__('setuptools').setup(
                    name='foo',
                    version='0.0.0',
                    py_modules=['hello']
                )"""
            ),
            'hello.py': '',
            'setup.cfg': DALS(
                """
                [sdist]
                formats=zip
                """
            ),
        }
        path.build(files)
        build_backend = self.get_build_backend()
        build_backend.build_sdist("temp")

    _relative_path_import_files = {
        'setup.py': DALS(
            """
            __import__('setuptools').setup(
                name='foo',
                version=__import__('hello').__version__,
                py_modules=['hello']
            )"""
        ),
        'hello.py': '__version__ = "0.0.0"',
        'setup.cfg': DALS(
            """
            [sdist]
            formats=zip
            """
        ),
    }

    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
        path.build(self._relative_path_import_files)
        build_backend = self.get_build_backend()
        with pytest.raises(ImportError, match="^No module named 'hello'$"):
            build_backend.build_sdist("temp")

    _simple_pyproject_example = {
        "pyproject.toml": DALS(
            """
            [project]
            name = "proj"
            version = "42"
            """
        ),
        "src": {"proj": {"__init__.py": ""}},
    }

    def _assert_link_tree(self, parent_dir):
        """All files in the directory should be either links or hard links"""
        files = list(Path(parent_dir).glob("**/*"))
        assert files  # Should not be empty
        for file in files:
            # FIX: the original used ``st_nlink > 0``, which is true for every
            # existing file, so the check could never fail.  A hard-linked
            # regular file has a link count of at least 2.
            assert file.is_symlink() or os.stat(file).st_nlink > 1

    def test_editable_without_config_settings(self, tmpdir_cwd):
        """
        Sanity check to ensure tests with --mode=strict are different from the ones
        without --mode.

        --mode=strict should create a local directory with a package tree.
        The directory should not get created otherwise.
        """
        path.build(self._simple_pyproject_example)
        build_backend = self.get_build_backend()
        assert not Path("build").exists()
        build_backend.build_editable("temp")
        assert not Path("build").exists()

    def test_build_wheel_inplace(self, tmpdir_cwd):
        config_settings = {"--build-option": ["build_ext", "--inplace"]}
        path.build(self._simple_pyproject_example)
        build_backend = self.get_build_backend()
        assert not Path("build").exists()
        Path("build").mkdir()
        build_backend.prepare_metadata_for_build_wheel("build", config_settings)
        build_backend.build_wheel("build", config_settings)
        assert Path("build/proj-42-py3-none-any.whl").exists()

    @pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
    def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
        path.build({**self._simple_pyproject_example, '_meta': {}})
        assert not Path("build").exists()
        build_backend = self.get_build_backend()
        build_backend.prepare_metadata_for_build_editable("_meta", config_settings)
        build_backend.build_editable("temp", config_settings, "_meta")
        self._assert_link_tree(next(Path("build").glob("__editable__.*")))

    @pytest.mark.parametrize(
        ("setup_literal", "requirements"),
        [
            ("'foo'", ['foo']),
            ("['foo']", ['foo']),
            (r"'foo\n'", ['foo']),
            (r"'foo\n\n'", ['foo']),
            ("['foo', 'bar']", ['foo', 'bar']),
            (r"'# Has a comment line\nfoo'", ['foo']),
            (r"'foo # Has an inline comment'", ['foo']),
            (r"'foo \\\n >=3.0'", ['foo>=3.0']),
            (r"'foo\nbar'", ['foo', 'bar']),
            (r"'foo\nbar\n'", ['foo', 'bar']),
            (r"['foo\n', 'bar\n']", ['foo', 'bar']),
        ],
    )
    @pytest.mark.parametrize('use_wheel', [True, False])
    def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
        files = {
            'setup.py': DALS(
                """
                from setuptools import setup

                setup(
                    name="qux",
                    version="0.0.0",
                    py_modules=["hello"],
                    setup_requires={setup_literal},
                )
                """
            ).format(setup_literal=setup_literal),
            'hello.py': DALS(
                """
                def run():
                    print('hello')
                """
            ),
        }
        path.build(files)
        build_backend = self.get_build_backend()
        if use_wheel:
            get_requires = build_backend.get_requires_for_build_wheel
        else:
            get_requires = build_backend.get_requires_for_build_sdist
        # Ensure that the build requirements are properly parsed
        expected = sorted(requirements)
        actual = get_requires()
        assert expected == sorted(actual)

    def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
        # Make sure patches introduced to retrieve setup_requires don't accidentally
        # activate auto-discovery and cause problems due to the incomplete set of
        # attributes passed to MinimalDistribution
        files = {
            'pyproject.toml': DALS(
                """
                [project]
                name = "proj"
                version = "42"
                """
            ),
            "setup.py": DALS(
                """
                __import__('setuptools').setup(
                    setup_requires=["foo"],
                    py_modules = ["hello", "world"]
                )
                """
            ),
            'hello.py': "'hello'",
            'world.py': "'world'",
        }
        path.build(files)
        build_backend = self.get_build_backend()
        setup_requires = build_backend.get_requires_for_build_wheel()
        assert setup_requires == ["foo"]

    def test_dont_install_setup_requires(self, tmpdir_cwd):
        files = {
            'setup.py': DALS(
                """
                from setuptools import setup

                setup(
                    name="qux",
                    version="0.0.0",
                    py_modules=["hello"],
                    setup_requires=["does-not-exist >99"],
                )
                """
            ),
            'hello.py': DALS(
                """
                def run():
                    print('hello')
                """
            ),
        }
        path.build(files)
        build_backend = self.get_build_backend()
        dist_dir = os.path.abspath('pip-dist-info')
        os.makedirs(dist_dir)
        # does-not-exist can't be satisfied, so if it attempts to install
        # setup_requires, it will fail.
        build_backend.prepare_metadata_for_build_wheel(dist_dir)

    _sys_argv_0_passthrough = {
        'setup.py': DALS(
            """
            import os
            import sys

            __import__('setuptools').setup(
                name='foo',
                version='0.0.0',
            )

            sys_argv = os.path.abspath(sys.argv[0])
            file_path = os.path.abspath('setup.py')
            assert sys_argv == file_path
            """
        )
    }

    def test_sys_argv_passthrough(self, tmpdir_cwd):
        path.build(self._sys_argv_0_passthrough)
        build_backend = self.get_build_backend()
        with pytest.raises(AssertionError):
            build_backend.build_sdist("temp")

    _setup_py_file_abspath = {
        'setup.py': DALS(
            """
            import os
            assert os.path.isabs(__file__)
            __import__('setuptools').setup(
                name='foo',
                version='0.0.0',
                py_modules=['hello'],
                setup_requires=['six'],
            )
            """
        )
    }

    def test_setup_py_file_abspath(self, tmpdir_cwd):
        path.build(self._setup_py_file_abspath)
        build_backend = self.get_build_backend()
        build_backend.build_sdist("temp")

    @pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
    def test_build_with_empty_setuppy(self, build_backend, build_hook):
        files = {'setup.py': ''}
        path.build(files)
        msg = re.escape('No distribution was found.')
        with pytest.raises(ValueError, match=msg):
            getattr(build_backend, build_hook)("temp")
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
    """Re-runs the whole suite against the ``__legacy__`` backend, overriding
    the two behaviors that legitimately differ from ``build_meta``."""

    backend_name = 'setuptools.build_meta:__legacy__'

    # build_meta_legacy-specific tests
    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
        # This must fail in build_meta, but must pass in build_meta_legacy
        path.build(self._relative_path_import_files)
        backend = self.get_build_backend()
        backend.build_sdist("temp")

    def test_sys_argv_passthrough(self, tmpdir_cwd):
        path.build(self._sys_argv_0_passthrough)
        backend = self.get_build_backend()
        backend.build_sdist("temp")
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
    """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
    monkeypatch.chdir(tmp_path)
    script = """
        import sys, setuptools
        setuptools.setup(name='foo', version='0.0.0')
        sys.exit(0)
        """
    (tmp_path / "setup.py").write_text(DALS(script), encoding="utf-8")
    backend = BuildBackend(backend_name="setuptools.build_meta")
    assert backend.get_requires_for_build_wheel() == []
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
    """A non-zero ``sys.exit`` in setup.py must propagate as SystemExit."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "setup.py").write_text(
        "import sys; sys.exit('some error')", encoding="utf-8"
    )

    backend = BuildBackend(backend_name="setuptools.build_meta")
    with pytest.raises(SystemExit, match="some error"):
        backend.get_requires_for_build_wheel()

View File

@@ -0,0 +1,480 @@
import os
import shutil
import stat
import warnings
from pathlib import Path
from unittest.mock import Mock
import jaraco.path
import pytest
from setuptools import SetuptoolsDeprecationWarning
from setuptools.dist import Distribution
from .textwrap import DALS
def test_directories_in_package_data_glob(tmpdir_cwd):
    """
    Directories matching the glob in package_data should
    not be included in the package data.

    Regression test for #261.
    """
    attrs = dict(
        script_name='setup.py',
        script_args=['build_py'],
        packages=[''],
        package_data={'': ['path/*']},
    )
    dist = Distribution(attrs)
    # Only a directory matches the glob; running build_py must not choke on it.
    os.makedirs('path/subpath')
    dist.parse_command_line()
    dist.run_commands()
def test_recursive_in_package_data_glob(tmpdir_cwd):
    """
    Files matching recursive globs (**) in package_data should
    be included in the package data.

    #1806
    """
    attrs = dict(
        script_name='setup.py',
        script_args=['build_py'],
        packages=[''],
        package_data={'': ['path/**/data']},
    )
    dist = Distribution(attrs)
    os.makedirs('path/subpath/subsubpath')
    open('path/subpath/subsubpath/data', 'wb').close()

    dist.parse_command_line()
    dist.run_commands()

    built = os.stat('build/lib/path/subpath/subsubpath/data')
    assert stat.S_ISREG(built.st_mode), "File is not included"
def test_read_only(tmpdir_cwd):
    """
    Ensure read-only flag is not preserved in copy
    for package modules and package data, as that
    causes problems with deleting read-only files on
    Windows.

    #1451
    """
    attrs = dict(
        script_name='setup.py',
        script_args=['build_py'],
        packages=['pkg'],
        package_data={'pkg': ['data.dat']},
    )
    dist = Distribution(attrs)
    os.makedirs('pkg')
    # Create a module and a data file, both read-only.
    for name in ('pkg/__init__.py', 'pkg/data.dat'):
        open(name, 'wb').close()
        os.chmod(name, stat.S_IREAD)

    dist.parse_command_line()
    dist.run_commands()
    # rmtree would fail on Windows if the copies kept the read-only bit.
    shutil.rmtree('build')
@pytest.mark.xfail(
    'platform.system() == "Windows"',
    reason="On Windows, files do not have executable bits",
    raises=AssertionError,
    strict=True,
)
def test_executable_data(tmpdir_cwd):
    """
    Ensure executable bit is preserved in copy for
    package data, as users rely on it for scripts.

    #2041
    """
    attrs = dict(
        script_name='setup.py',
        script_args=['build_py'],
        packages=['pkg'],
        package_data={'pkg': ['run-me']},
    )
    dist = Distribution(attrs)
    os.makedirs('pkg')
    open('pkg/__init__.py', 'wb').close()
    open('pkg/run-me', 'wb').close()
    os.chmod('pkg/run-me', 0o700)

    dist.parse_command_line()
    dist.run_commands()

    mode = os.stat('build/lib/pkg/run-me').st_mode
    assert mode & stat.S_IEXEC, "Script is not executable"
EXAMPLE_WITH_MANIFEST = {
"setup.cfg": DALS(
"""
[metadata]
name = mypkg
version = 42
[options]
include_package_data = True
packages = find:
[options.packages.find]
exclude = *.tests*
"""
),
"mypkg": {
"__init__.py": "",
"resource_file.txt": "",
"tests": {
"__init__.py": "",
"test_mypkg.py": "",
"test_file.txt": "",
},
},
"MANIFEST.in": DALS(
"""
global-include *.py *.txt
global-exclude *.py[cod]
prune dist
prune build
prune *.egg-info
"""
),
}
def test_excluded_subpackages(tmpdir_cwd):
    """Subpackages excluded via ``packages.find`` should not be copied by
    build_py, even with ``include_package_data = True`` (currently only a
    deprecation warning is issued — see #3260)."""
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")

    msg = r"Python recognizes 'mypkg\.tests' as an importable package"
    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
        # TODO: To fix #3260 we need some transition period to deprecate the
        # existing behavior of `include_package_data`. After the transition, we
        # should remove the warning and fix the behavior.

        if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
            # pytest.warns reset the warning filter temporarily
            # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
            warnings.filterwarnings(
                "ignore",
                "'encoding' argument not specified",
                module="distutils.text_file",
                # This warning is already fixed in pypa/distutils but not in stdlib
            )

        build_py.finalize_options()
        build_py.run()

    build_dir = Path(dist.get_command_obj("build_py").build_lib)
    assert (build_dir / "mypkg/__init__.py").exists()
    assert (build_dir / "mypkg/resource_file.txt").exists()

    # Setuptools is configured to ignore `mypkg.tests`, therefore the following
    # files/dirs should not be included in the distribution.
    for f in [
        "mypkg/tests/__init__.py",
        "mypkg/tests/test_mypkg.py",
        "mypkg/tests/test_file.txt",
        "mypkg/tests",
    ]:
        with pytest.raises(AssertionError):
            # TODO: Enforce the following assertion once #3260 is fixed
            # (remove context manager and the following xfail).
            assert not (build_dir / f).exists()

    pytest.xfail("#3260")
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
    attempt to run egg_info again.
    """
    # == Pre-condition ==
    # Generate an egg-info dir
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()
    assert dist.include_package_data

    egg_info = dist.get_command_obj("egg_info")
    dist.run_command("egg_info")
    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
    assert egg_info_dir.is_dir()

    # == Setup ==
    build_py = dist.get_command_obj("build_py")
    build_py.finalize_options()
    egg_info = dist.get_command_obj("egg_info")
    # Wrap egg_info.run in a spy so we can tell whether build_py invoked it.
    egg_info_run = Mock(side_effect=egg_info.run)
    monkeypatch.setattr(egg_info, "run", egg_info_run)

    # == Remove caches ==
    # egg_info is called when build_py looks for data_files, which gets cached.
    # We need to ensure it is not cached yet, otherwise it may impact on the tests
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Sanity check ==
    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
    build_py.existing_egg_info_dir = None
    build_py.run()
    egg_info_run.assert_called()

    # == Remove caches ==
    egg_info_run.reset_mock()
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Actual test ==
    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
    build_py.existing_egg_info_dir = egg_info_dir
    build_py.run()
    egg_info_run.assert_not_called()
    assert build_py.data_files

    # Make sure the list of outputs is actually OK
    outputs = map(lambda x: x.replace(os.sep, "/"), build_py.get_outputs())
    assert outputs
    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
    assert example in outputs
EXAMPLE_ARBITRARY_MAPPING = {
"pyproject.toml": DALS(
"""
[project]
name = "mypkg"
version = "42"
[tool.setuptools]
packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]
[tool.setuptools.package-dir]
"" = "src"
"mypkg.sub2" = "src/mypkg/_sub2"
"mypkg.sub2.nested" = "other"
"""
),
"src": {
"mypkg": {
"__init__.py": "",
"resource_file.txt": "",
"sub1": {
"__init__.py": "",
"mod1.py": "",
},
"_sub2": {
"mod2.py": "",
},
},
},
"other": {
"__init__.py": "",
"mod3.py": "",
},
"MANIFEST.in": DALS(
"""
global-include *.py *.txt
global-exclude *.py[cod]
"""
),
}
def test_get_outputs(tmpdir_cwd):
    """``get_outputs``/``get_output_mapping`` must honor arbitrary
    ``package-dir`` mappings (editable mode), translating each source file to
    its location under ``build_lib``."""
    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
    dist = Distribution({"script_name": "%test%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")
    build_py.editable_mode = True
    build_py.ensure_finalized()
    build_lib = build_py.build_lib.replace(os.sep, "/")
    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
    assert outputs == {
        f"{build_lib}/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py",
    }
    # The mapping points each build output back at its original source file.
    mapping = {
        k.replace(os.sep, "/"): v.replace(os.sep, "/")
        for k, v in build_py.get_output_mapping().items()
    }
    assert mapping == {
        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
    }
class TestTypeInfoFiles:
    """Type-information files (``py.typed``, ``*.pyi``) should be shipped by
    default, and remain excludable via ``exclude-package-data``."""

    # pyproject.toml variants used across the parametrized tests below.
    PYPROJECTS = {
        "default_pyproject": DALS(
            """
            [project]
            name = "foo"
            version = "1"
            """
        ),
        "dont_include_package_data": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false
            """
        ),
        "exclude_type_info": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false

            [tool.setuptools.exclude-package-data]
            "*" = ["py.typed", "*.pyi"]
            """
        ),
    }

    # Each example maps a project layout to the type files expected in outputs.
    EXAMPLES = {
        "simple_namespace": {
            "directory_structure": {
                "foo": {
                    "bar.pyi": "",
                    "py.typed": "",
                    "__init__.py": "",
                }
            },
            "expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
        },
        "nested_inside_namespace": {
            "directory_structure": {
                "foo": {
                    "bar": {
                        "py.typed": "",
                        "mod.pyi": "",
                    }
                }
            },
            "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
        },
        "namespace_nested_inside_regular": {
            "directory_structure": {
                "foo": {
                    "namespace": {
                        "foo.pyi": "",
                    },
                    "__init__.pyi": "",
                    "py.typed": "",
                }
            },
            "expected_type_files": {
                "foo/namespace/foo.pyi",
                "foo/__init__.pyi",
                "foo/py.typed",
            },
        },
    }

    @pytest.mark.parametrize(
        "pyproject",
        [
            "default_pyproject",
            pytest.param(
                "dont_include_package_data",
                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
            ),
        ],
    )
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs

    @pytest.mark.parametrize("pyproject", ["exclude_type_info"])
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files.isdisjoint(outputs)

    def test_stub_only_package(self, tmpdir_cwd):
        # PEP 561 stub-only distributions (``<pkg>-stubs``) must be packaged.
        structure = {
            "pyproject.toml": DALS(
                """
                [project]
                name = "foo-stubs"
                version = "1"
                """
            ),
            "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
        }
        expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs
def get_finalized_build_py(script_name="%build_py-test%"):
    """Create a Distribution from the on-disk config files and return its
    finalized ``build_py`` command object."""
    dist = Distribution({"script_name": script_name})
    dist.parse_config_files()
    cmd = dist.get_command_obj("build_py")
    cmd.finalize_options()
    return cmd
def get_outputs(build_py):
    """Return the command's outputs as POSIX-style paths relative to
    ``build_lib``."""
    base = Path(build_py.build_lib)
    relative = (os.path.relpath(out, base) for out in build_py.get_outputs())
    return {rel.replace(os.sep, "/") for rel in relative}

View File

@@ -0,0 +1,647 @@
import os
import sys
from configparser import ConfigParser
from itertools import product
from typing import cast
import jaraco.path
import pytest
from path import Path
import setuptools # noqa: F401 # force distutils.core to be patched
from setuptools.command.sdist import sdist
from setuptools.discovery import find_package_path, find_parent_package
from setuptools.dist import Distribution
from setuptools.errors import PackageDiscoveryError
from .contexts import quiet
from .integration.helpers import get_sdist_members, get_wheel_members, run
from .textwrap import DALS
import distutils.core
class TestFindParentPackage:
    """Behavior of ``setuptools.discovery.find_parent_package``."""

    def test_single_package(self, tmp_path):
        # find_parent_package should find a non-namespace parent package
        nested = tmp_path / "src/namespace/pkg/nested"
        nested.mkdir(exist_ok=True, parents=True)
        (nested / "__init__.py").touch()
        (tmp_path / "src/namespace/pkg/__init__.py").touch()
        packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
        parent = find_parent_package(packages, {"": "src"}, tmp_path)
        assert parent == "namespace.pkg"

    def test_multiple_toplevel(self, tmp_path):
        # find_parent_package should return null if the given list of packages does not
        # have a single parent package
        multiple = ["pkg", "pkg1", "pkg2"]
        for name in multiple:
            pkg_dir = tmp_path / "src" / name
            pkg_dir.mkdir(exist_ok=True, parents=True)
            (pkg_dir / "__init__.py").touch()
        assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
class TestDiscoverPackagesAndPyModules:
    """Make sure discovered values for ``packages`` and ``py_modules`` work
    similarly to explicit configuration for the simple scenarios.
    """

    OPTIONS = {
        # Different options according to the circumstance being tested
        "explicit-src": {"package_dir": {"": "src"}, "packages": ["pkg"]},
        "variation-lib": {
            "package_dir": {"": "lib"},  # variation of the source-layout
        },
        "explicit-flat": {"packages": ["pkg"]},
        "explicit-single_module": {"py_modules": ["pkg"]},
        "explicit-namespace": {"packages": ["ns", "ns.pkg"]},
        "automatic-src": {},
        "automatic-flat": {},
        "automatic-single_module": {},
        "automatic-namespace": {},
    }
    FILES = {
        "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
        "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
        "flat": ["pkg/__init__.py", "pkg/main.py"],
        "single_module": ["pkg.py"],
        "namespace": ["ns/pkg/__init__.py"],
    }

    def _get_info(self, circumstance):
        """Map a parametrized circumstance id to its (files, options) pair."""
        _, _, layout = circumstance.partition("-")
        files = self.FILES[layout]
        options = self.OPTIONS[circumstance]
        return files, options

    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
    def test_sdist_filelist(self, tmp_path, circumstance):
        files, options = self._get_info(circumstance)
        _populate_project_dir(tmp_path, files, options)

        _, cmd = _run_sdist_programatically(tmp_path, options)

        manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
        for file in files:
            assert any(f.endswith(file) for f in manifest)

    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
    def test_project(self, tmp_path, circumstance):
        files, options = self._get_info(circumstance)
        _populate_project_dir(tmp_path, files, options)

        # Simulate a pre-existing `build` directory
        (tmp_path / "build").mkdir()
        (tmp_path / "build/lib").mkdir()
        (tmp_path / "build/bdist.linux-x86_64").mkdir()
        (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
        (tmp_path / "build/lib/__init__.py").touch()
        (tmp_path / "build/lib/file.py").touch()
        (tmp_path / "dist").mkdir()
        (tmp_path / "dist/file.py").touch()

        _run_build(tmp_path)

        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
        print("~~~~~ sdist_members ~~~~~")
        print('\n'.join(sdist_files))
        assert sdist_files >= set(files)

        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
        print("~~~~~ wheel_members ~~~~~")
        print('\n'.join(wheel_files))
        orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
        assert wheel_files >= orig_files

        # Make sure build files are not included by mistake.
        # NOTE: this loop previously asserted on ``files`` (the fixture list)
        # instead of each wheel member, which made the check a no-op.
        # We compare path *segments* so that e.g. ``*.dist-info`` entries do
        # not spuriously match the "dist" substring.
        for file in wheel_files:
            segments = file.split("/")
            assert "build" not in segments
            assert "dist" not in segments

    PURPOSEFULLY_EMPY = {
        "setup.cfg": DALS(
            """
            [metadata]
            name = myproj
            version = 0.0.0

            [options]
            {param} =
            """
        ),
        "setup.py": DALS(
            """
            __import__('setuptools').setup(
                name="myproj",
                version="0.0.0",
                {param}=[]
            )
            """
        ),
        "pyproject.toml": DALS(
            """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'

            [project]
            name = "myproj"
            version = "0.0.0"

            [tool.setuptools]
            {param} = []
            """
        ),
        "template-pyproject.toml": DALS(
            """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'
            """
        ),
    }

    @pytest.mark.parametrize(
        ("config_file", "param", "circumstance"),
        product(
            ["setup.cfg", "setup.py", "pyproject.toml"],
            ["packages", "py_modules"],
            FILES.keys(),
        ),
    )
    def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
        """An explicitly empty ``packages``/``py_modules`` disables discovery."""
        files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
        _populate_project_dir(tmp_path, files, {})

        if config_file == "pyproject.toml":
            template_param = param.replace("_", "-")
        else:
            # Make sure build works with or without setup.cfg
            pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
            (tmp_path / "pyproject.toml").write_text(pyproject, encoding="utf-8")
            template_param = param

        config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
        (tmp_path / config_file).write_text(config, encoding="utf-8")

        dist = _get_dist(tmp_path, {})
        # When either parameter package or py_modules is an empty list,
        # then there should be no discovery
        assert getattr(dist, param) == []
        other = {"py_modules": "packages", "packages": "py_modules"}[param]
        assert getattr(dist, other) is None

    @pytest.mark.parametrize(
        ("extra_files", "pkgs"),
        [
            (["venv/bin/simulate_venv"], {"pkg"}),
            (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
            (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
            (
                # Type stubs can also be namespaced
                ["namespace-stubs/pkg/__init__.pyi"],
                {"pkg", "namespace-stubs", "namespace-stubs.pkg"},
            ),
            (
                # Just the top-level package can have `-stubs`, ignore nested ones
                ["namespace-stubs/pkg-stubs/__init__.pyi"],
                {"pkg", "namespace-stubs"},
            ),
            (["_hidden/file.py"], {"pkg"}),
            (["news/finalize.py"], {"pkg"}),
        ],
    )
    def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
        files = self.FILES["flat"] + extra_files
        _populate_project_dir(tmp_path, files, {})
        dist = _get_dist(tmp_path, {})
        assert set(dist.packages) == pkgs

    @pytest.mark.parametrize(
        "extra_files",
        [
            ["other/__init__.py"],
            ["other/finalize.py"],
        ],
    )
    def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
        files = self.FILES["flat"] + extra_files
        _populate_project_dir(tmp_path, files, {})
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})

    def test_flat_layout_with_single_module(self, tmp_path):
        files = self.FILES["single_module"] + ["invalid-module-name.py"]
        _populate_project_dir(tmp_path, files, {})
        dist = _get_dist(tmp_path, {})
        assert set(dist.py_modules) == {"pkg"}

    def test_flat_layout_with_multiple_modules(self, tmp_path):
        files = self.FILES["single_module"] + ["valid_module_name.py"]
        _populate_project_dir(tmp_path, files, {})
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})

    def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
        """Regression for issue 3692"""
        from setuptools import build_meta

        pyproject = '[project]\nname = "test"\nversion = "1"'
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
        (tmp_path / "foo.py").touch()
        with jaraco.path.DirectoryStack().context(tmp_path):
            build_meta.build_wheel(".")
        # Ensure py_modules are found
        wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
        assert "foo.py" in wheel_files
class TestNoConfig:
    """Projects without any explicit ``name`` should have it discovered from
    the single top-level package or module."""

    DEFAULT_VERSION = "0.0.0"  # Default version given by setuptools

    # Expected discovered name -> project layout producing it.
    EXAMPLES = {
        "pkg1": ["src/pkg1.py"],
        "pkg2": ["src/pkg2/__init__.py"],
        "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
        "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
        "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
        "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_discover_name(self, tmp_path, example):
        _populate_project_dir(tmp_path, self.EXAMPLES[example], {})
        dist = _get_dist(tmp_path, {})
        assert dist.get_name() == example

    def test_build_with_discovered_name(self, tmp_path):
        files = ["src/ns/nested/pkg/__init__.py"]
        _populate_project_dir(tmp_path, files, {})
        _run_build(tmp_path, "--sdist")
        # Expected distribution file
        dist_file = tmp_path / f"dist/ns_nested_pkg-{self.DEFAULT_VERSION}.tar.gz"
        assert dist_file.is_file()
class TestWithAttrDirective:
    """Dynamic metadata via the ``attr:`` directive should cooperate with
    package/package_dir auto-discovery (see #3545 for the related regression).
    """

    @pytest.mark.parametrize(
        ("folder", "opts"),
        [
            ("src", {}),
            ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
        ],
    )
    def test_setupcfg_metadata(self, tmp_path, folder, opts):
        files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
        _populate_project_dir(tmp_path, files, opts)

        # Prepend a dynamic version (read from pkg.version) to the config.
        config = (tmp_path / "setup.cfg").read_text(encoding="utf-8")
        overwrite = {
            folder: {"pkg": {"__init__.py": "version = 42"}},
            "setup.cfg": "[metadata]\nversion = attr: pkg.version\n" + config,
        }
        jaraco.path.build(overwrite, prefix=tmp_path)

        dist = _get_dist(tmp_path, {})
        assert dist.get_name() == "pkg"
        assert dist.get_version() == "42"
        assert dist.package_dir

        package_path = find_package_path("pkg", dist.package_dir, tmp_path)
        assert os.path.exists(package_path)
        assert folder in Path(package_path).parts()

        _run_build(tmp_path, "--sdist")
        dist_file = tmp_path / "dist/pkg-42.tar.gz"
        assert dist_file.is_file()

    def test_pyproject_metadata(self, tmp_path):
        _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
        overwrite = {
            "src": {"pkg": {"__init__.py": "version = 42"}},
            "pyproject.toml": (
                "[project]\nname = 'pkg'\ndynamic = ['version']\n"
                "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
            ),
        }
        jaraco.path.build(overwrite, prefix=tmp_path)

        dist = _get_dist(tmp_path, {})
        assert dist.get_version() == "42"
        assert dist.package_dir == {"": "src"}
class TestWithCExtension:
    """Auto-discovery behavior for projects that only ship C-extensions and
    configure no pure-Python packages/modules."""

    def _simulate_package_with_extension(self, tmp_path):
        # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
        files = [
            "benchmarks/file.py",
            "docs/Makefile",
            "docs/requirements.txt",
            "docs/source/conf.py",
            "proj/header.h",
            "proj/file.py",
            "py/proj.cpp",
            "py/other.cpp",
            "py/file.py",
            "py/py.typed",
            "py/tests/test_proj.py",
            "README.rst",
        ]
        _populate_project_dir(tmp_path, files, {})

        setup_script = """
            from setuptools import Extension, setup

            ext_modules = [
                Extension(
                    "proj",
                    ["py/proj.cpp", "py/other.cpp"],
                    include_dirs=["."],
                    language="c++",
                ),
            ]
            setup(ext_modules=ext_modules)
            """
        (tmp_path / "setup.py").write_text(DALS(setup_script), encoding="utf-8")

    def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
        """Ensure that auto-discovery is not triggered when the project is based on
        C-extensions only, for backward compatibility.
        """
        self._simulate_package_with_extension(tmp_path)

        pyproject = """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'
            """
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")

        setupcfg = """
            [metadata]
            name = proj
            version = 42
            """
        (tmp_path / "setup.cfg").write_text(DALS(setupcfg), encoding="utf-8")

        dist = _get_dist(tmp_path, {})
        assert dist.get_name() == "proj"
        assert dist.get_version() == "42"
        # Discovery skipped: no py_modules/packages were inferred.
        assert dist.py_modules is None
        assert dist.packages is None
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "proj"

    def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
        """When opting-in to pyproject.toml metadata, auto-discovery will be active if
        the package lists C-extensions, but does not configure py-modules or packages.

        This way we ensure users with complex package layouts that would lead to the
        discovery of multiple top-level modules/packages see errors and are forced to
        explicitly set ``packages`` or ``py-modules``.
        """
        self._simulate_package_with_extension(tmp_path)

        pyproject = """
            [project]
            name = 'proj'
            version = '42'
            """
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})
class TestWithPackageData:
    """``include_package_data`` must keep working regardless of whether the
    project layout/configuration is explicit or auto-discovered (#3196)."""

    def _simulate_package_with_data_files(self, tmp_path, src_root):
        # A package with two data files plus a MANIFEST.in that ships them.
        files = [
            f"{src_root}/proj/__init__.py",
            f"{src_root}/proj/file1.txt",
            f"{src_root}/proj/nested/file2.txt",
        ]
        _populate_project_dir(tmp_path, files, {})

        manifest = """
            global-include *.py *.txt
            """
        (tmp_path / "MANIFEST.in").write_text(DALS(manifest), encoding="utf-8")

    EXAMPLE_SETUPCFG = """
    [metadata]
    name = proj
    version = 42

    [options]
    include_package_data = True
    """
    EXAMPLE_PYPROJECT = """
    [project]
    name = "proj"
    version = "42"
    """

    PYPROJECT_PACKAGE_DIR = """
    [tool.setuptools]
    package-dir = {"" = "src"}
    """

    @pytest.mark.parametrize(
        ("src_root", "files"),
        [
            (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
            (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
            ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
            ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
            (
                "src",
                {
                    "setup.cfg": DALS(EXAMPLE_SETUPCFG)
                    + DALS(
                        """
                        packages = find:
                        package_dir =
                            =src

                        [options.packages.find]
                        where = src
                        """
                    )
                },
            ),
            (
                "src",
                {
                    "pyproject.toml": DALS(EXAMPLE_PYPROJECT)
                    + DALS(
                        """
                        [tool.setuptools]
                        package-dir = {"" = "src"}
                        """
                    )
                },
            ),
        ],
    )
    def test_include_package_data(self, tmp_path, src_root, files):
        """
        Make sure auto-discovery does not affect package include_package_data.
        See issue #3196.
        """
        jaraco.path.build(files, prefix=str(tmp_path))
        self._simulate_package_with_data_files(tmp_path, src_root)

        expected = {
            os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
            os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
        }

        _run_build(tmp_path)

        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
        print("~~~~~ sdist_members ~~~~~")
        print('\n'.join(sdist_files))
        assert sdist_files >= expected

        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
        print("~~~~~ wheel_members ~~~~~")
        print('\n'.join(wheel_files))
        orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected}
        assert wheel_files >= orig_files
def test_compatible_with_numpy_configuration(tmp_path):
    """A pre-set ``configuration`` attribute on the distribution must keep
    package/module auto-discovery disabled."""
    layout = [
        "dir1/__init__.py",
        "dir2/__init__.py",
        "file.py",
    ]
    _populate_project_dir(tmp_path, layout, {})
    dist = Distribution({})
    dist.configuration = object()
    dist.set_defaults()
    assert dist.py_modules is None
    assert dist.packages is None
def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
    """Name auto-discovery must not interfere with ``setup.py --name``."""
    jaraco.path.build({"pkg.py": ""})
    dist = Distribution({})
    dist.script_args = ["--name"]
    dist.set_defaults()
    dist.parse_command_line()  # <-- no exception should be raised here.
    assert dist.get_name() == "pkg"
def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
    """According to #3545 it seems that ``name`` discovery is running,
    even when the project already explicitly sets it.
    This seems to be related to parsing of dynamic versions (via ``attr`` directive),
    which requires the auto-discovery of ``package_dir``.
    """
    files = {
        "src": {
            "pkg": {"__init__.py": "__version__ = 42\n"},
        },
        "pyproject.toml": DALS(
            """
            [project]
            name = "myproj"  # purposefully different from package name
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {"attr" = "pkg.__version__"}
            """
        ),
    }
    jaraco.path.build(files)
    dist = Distribution({})
    orig_analyse_name = dist.set_defaults.analyse_name

    def spy_analyse_name():
        # We can check if name discovery was triggered by ensuring the original
        # name remains instead of the package name.
        orig_analyse_name()
        assert dist.get_name() == "myproj"

    # Intercept the name-analysis step so the assertion runs mid-discovery.
    monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
    dist.parse_config_files()
    assert dist.get_version() == "42"
    assert set(dist.packages) == {"pkg"}
def _populate_project_dir(root, files, options):
    """Create a minimal buildable project under *root* plus the given files.

    ``files`` are created empty; ``options`` are serialized to setup.cfg.
    """
    # NOTE: Currently pypa/build will refuse to build the project if no
    # `pyproject.toml` or `setup.py` is found. So it is impossible to do
    # completely "config-less" projects.
    basic = {
        "setup.py": "import setuptools\nsetuptools.setup()",
        "README.md": "# Example Package",
        "LICENSE": "Copyright (c) 2018",
    }
    jaraco.path.build(basic, prefix=root)
    _write_setupcfg(root, options)
    for file in files:
        full_path = root / file
        full_path.parent.mkdir(exist_ok=True, parents=True)
        full_path.touch()
def _write_setupcfg(root, options):
    """Serialize ``options`` into ``root/setup.cfg`` (no-op when empty)."""
    if not options:
        print("~~~~~ **NO** setup.cfg ~~~~~")
        return
    setupcfg = ConfigParser()
    setupcfg.add_section("options")
    for key, value in options.items():
        if key == "packages.find":
            # ``find:`` parameters live in their own [options.packages.find].
            section = f"options.{key}"
            setupcfg.add_section(section)
            setupcfg[section].update(value)
        elif isinstance(value, list):
            setupcfg["options"][key] = ", ".join(value)
        elif isinstance(value, dict):
            lines = (f"\t{k} = {v}" for k, v in value.items())
            setupcfg["options"][key] = "\n" + "\n".join(lines)
        else:
            setupcfg["options"][key] = str(value)
    with open(root / "setup.cfg", "w", encoding="utf-8") as f:
        setupcfg.write(f)
    print("~~~~~ setup.cfg ~~~~~")
    print((root / "setup.cfg").read_text(encoding="utf-8"))
def _run_build(path, *flags):
    """Run ``python -m build --no-isolation`` for the project at *path*.

    ``DISTUTILS_DEBUG`` is passed as an empty string — presumably to make the
    environment deterministic for the helper ``run``; verify against ``run``'s
    env-merging semantics.
    """
    cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
    return run(cmd, env={'DISTUTILS_DEBUG': ''})
def _get_dist(dist_path, attrs):
    """Build a ``Distribution`` for the project at ``dist_path``.

    Runs ``setup.py`` (stopped after init) when one exists, otherwise creates
    the distribution directly from ``attrs``; either way config files are
    parsed and ``set_defaults`` (auto-discovery) is applied.
    """
    root = "/".join(os.path.split(dist_path))  # POSIX-style
    script = dist_path / 'setup.py'
    if script.exists():
        # `path.Path` as a context manager: chdir into the project and restore.
        with Path(dist_path):
            dist = cast(
                Distribution,
                distutils.core.run_setup("setup.py", {}, stop_after="init"),
            )
    else:
        dist = Distribution(attrs)

    dist.src_root = root
    dist.script_name = "setup.py"
    with Path(dist_path):
        dist.parse_config_files()
        dist.set_defaults()
    return dist
def _run_sdist_programatically(dist_path, attrs):
    """Finalize and run the ``sdist`` command in-process.

    Returns ``(dist, cmd)`` so callers can inspect ``cmd.filelist``.
    """
    dist = _get_dist(dist_path, attrs)
    cmd = sdist(dist)
    cmd.ensure_finalized()
    # Discovery must have produced something for sdist to make sense.
    assert cmd.distribution.packages or cmd.distribution.py_modules

    with quiet(), Path(dist_path):
        cmd.run()
    return dist, cmd

View File

@@ -0,0 +1,622 @@
from __future__ import annotations
import functools
import importlib
import io
from email import message_from_string
from email.generator import Generator
from email.message import EmailMessage, Message
from email.parser import Parser
from email.policy import EmailPolicy
from inspect import cleandoc
from pathlib import Path
from unittest.mock import Mock
import jaraco.path
import pytest
from packaging.metadata import Metadata
from packaging.requirements import Requirement
from setuptools import _reqs, sic
from setuptools._core_metadata import rfc822_escape, rfc822_unescape
from setuptools.command.egg_info import egg_info, write_requirements
from setuptools.config import expand, setupcfg
from setuptools.dist import Distribution
from .config.downloads import retrieve_file, urls_from_file
# Common metadata shared by the parametrized metadata test cases below.
EXAMPLE_BASE_INFO = {
    "name": "package",
    "version": "0.0.1",
    "author": "Foo Bar",
    "author_email": "foo@bar.net",
    "long_description": "Long\ndescription",
    "description": "Short description",
    "keywords": ["one", "two"],
}
@pytest.mark.parametrize(
    ("content", "result"),
    (
        pytest.param(
            "Just a single line",
            None,
            id="single_line",
        ),
        pytest.param(
            "Multiline\nText\nwithout\nextra indents\n",
            None,
            id="multiline",
        ),
        pytest.param(
            "Multiline\n With\n\nadditional\n  indentation",
            None,
            id="multiline_with_indentation",
        ),
        pytest.param(
            "  Leading whitespace",
            "Leading whitespace",
            id="remove_leading_whitespace",
        ),
        pytest.param(
            "  Leading whitespace\nIn\n    Multiline comment",
            "Leading whitespace\nIn\n    Multiline comment",
            id="remove_leading_whitespace_multiline",
        ),
    ),
)
def test_rfc822_unescape(content, result):
    """Escape followed by unescape should round-trip ``content``.

    ``result`` overrides the expectation for the cases where leading
    whitespace is legitimately stripped by the round-trip.
    """
    assert (result or content) == rfc822_unescape(rfc822_escape(content))
def __read_test_cases():
    """Build the ``(id, attrs)`` pairs for ``test_read_metadata``, covering
    fields introduced by successive core-metadata versions."""
    base = EXAMPLE_BASE_INFO

    # ``params()`` yields a copy of ``base`` overridden with the kwargs.
    params = functools.partial(dict, base)

    return [
        ('Metadata version 1.0', params()),
        (
            'Metadata Version 1.0: Short long description',
            params(
                long_description='Short long description',
            ),
        ),
        (
            'Metadata version 1.1: Classifiers',
            params(
                classifiers=[
                    'Programming Language :: Python :: 3',
                    'Programming Language :: Python :: 3.7',
                    'License :: OSI Approved :: MIT License',
                ],
            ),
        ),
        (
            'Metadata version 1.1: Download URL',
            params(
                download_url='https://example.com',
            ),
        ),
        (
            'Metadata Version 1.2: Requires-Python',
            params(
                python_requires='>=3.7',
            ),
        ),
        pytest.param(
            'Metadata Version 1.2: Project-Url',
            params(project_urls=dict(Foo='https://example.bar')),
            marks=pytest.mark.xfail(
                reason="Issue #1578: project_urls not read",
            ),
        ),
        (
            'Metadata Version 2.1: Long Description Content Type',
            params(
                long_description_content_type='text/x-rst; charset=UTF-8',
            ),
        ),
        (
            'License',
            params(
                license='MIT',
            ),
        ),
        (
            'License multiline',
            params(
                license='This is a long license \nover multiple lines',
            ),
        ),
        pytest.param(
            'Metadata Version 2.1: Provides Extra',
            params(provides_extras=['foo', 'bar']),
            marks=pytest.mark.xfail(reason="provides_extras not read"),
        ),
        (
            'Missing author',
            dict(
                name='foo',
                version='1.0.0',
                author_email='snorri@sturluson.name',
            ),
        ),
        (
            'Missing author e-mail',
            dict(
                name='foo',
                version='1.0.0',
                author='Snorri Sturluson',
            ),
        ),
        (
            'Missing author and e-mail',
            dict(
                name='foo',
                version='1.0.0',
            ),
        ),
        (
            'Bypass normalized version',
            dict(
                name='foo',
                version=sic('1.0.0a'),
            ),
        ),
    ]
@pytest.mark.parametrize(("name", "attrs"), __read_test_cases())
def test_read_metadata(name, attrs):
    """Round-trip metadata through write_pkg_file/read_pkg_file losslessly."""
    written = Distribution(attrs).metadata
    metadata_cls = type(written)

    # Serialize to an in-memory PKG-INFO, validate it, then parse it back.
    buffer = io.StringIO()
    written.write_pkg_file(buffer)
    buffer.seek(0)
    assert _valid_metadata(buffer.read())
    buffer.seek(0)
    parsed = metadata_cls()
    parsed.read_pkg_file(buffer)

    # Every accessor must agree between the serialized and re-parsed objects.
    accessors = {
        'name': metadata_cls.get_name,
        'version': metadata_cls.get_version,
        'author': metadata_cls.get_contact,
        'author_email': metadata_cls.get_contact_email,
        'metadata_version': metadata_cls.get_metadata_version,
        'provides': metadata_cls.get_provides,
        'description': metadata_cls.get_description,
        'long_description': metadata_cls.get_long_description,
        'download_url': metadata_cls.get_download_url,
        'keywords': metadata_cls.get_keywords,
        'platforms': metadata_cls.get_platforms,
        'obsoletes': metadata_cls.get_obsoletes,
        'requires': metadata_cls.get_requires,
        'classifiers': metadata_cls.get_classifiers,
        'project_urls': lambda meta: getattr(meta, 'project_urls', {}),
        'provides_extras': lambda meta: getattr(meta, 'provides_extras', {}),
    }
    for getter in accessors.values():
        assert getter(parsed) == getter(written)
def __maintainer_test_cases():
    """Build (test-id, attrs) pairs covering author/maintainer combinations."""
    attrs = {"name": "package", "version": "1.0", "description": "xxx"}

    def merge_dicts(d1, d2):
        # Non-destructive merge so the shared base `attrs` stays intact.
        d1 = d1.copy()
        d1.update(d2)
        return d1

    return [
        ('No author, no maintainer', attrs.copy()),
        (
            'Author (no e-mail), no maintainer',
            merge_dicts(attrs, {'author': 'Author Name'}),
        ),
        (
            'Author (e-mail), no maintainer',
            merge_dicts(
                attrs, {'author': 'Author Name', 'author_email': 'author@name.com'}
            ),
        ),
        (
            'No author, maintainer (no e-mail)',
            merge_dicts(attrs, {'maintainer': 'Maintainer Name'}),
        ),
        (
            'No author, maintainer (e-mail)',
            merge_dicts(
                attrs,
                {
                    'maintainer': 'Maintainer Name',
                    'maintainer_email': 'maintainer@name.com',
                },
            ),
        ),
        (
            'Author (no e-mail), Maintainer (no-email)',
            merge_dicts(
                attrs, {'author': 'Author Name', 'maintainer': 'Maintainer Name'}
            ),
        ),
        (
            'Author (e-mail), Maintainer (e-mail)',
            merge_dicts(
                attrs,
                {
                    'author': 'Author Name',
                    'author_email': 'author@name.com',
                    'maintainer': 'Maintainer Name',
                    'maintainer_email': 'maintainer@name.com',
                },
            ),
        ),
        (
            'No author (e-mail), no maintainer (e-mail)',
            merge_dicts(
                attrs,
                {
                    'author_email': 'author@name.com',
                    'maintainer_email': 'maintainer@name.com',
                },
            ),
        ),
        # Non-ASCII names exercise the UTF-8 path of PKG-INFO generation.
        ('Author unicode', merge_dicts(attrs, {'author': '鉄沢寛'})),
        ('Maintainer unicode', merge_dicts(attrs, {'maintainer': 'Jan Łukasiewicz'})),
    ]
@pytest.mark.parametrize(("name", "attrs"), __maintainer_test_cases())
def test_maintainer_author(name, attrs, tmpdir):
    """PKG-INFO must contain exactly the author/maintainer fields set in attrs."""
    # Maps Distribution attribute name -> PKG-INFO field name.
    tested_keys = {
        'author': 'Author',
        'author_email': 'Author-email',
        'maintainer': 'Maintainer',
        'maintainer_email': 'Maintainer-email',
    }

    # Generate a PKG-INFO file
    dist = Distribution(attrs)
    fn = tmpdir.mkdir('pkg_info')
    fn_s = str(fn)
    dist.metadata.write_pkg_info(fn_s)
    with open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f:
        pkg_info = f.read()
    assert _valid_metadata(pkg_info)

    # Drop blank lines and strip lines from default description
    raw_pkg_lines = pkg_info.splitlines()
    pkg_lines = list(filter(None, raw_pkg_lines[:-2]))
    pkg_lines_set = set(pkg_lines)

    # Duplicate lines should not be generated
    assert len(pkg_lines) == len(pkg_lines_set)

    # BUG FIX: this loop previously unpacked as `for fkey, dkey`, so the
    # attrs lookup used the PKG-INFO field name ('Author', ...) which never
    # exists in attrs -> `val` was always None and every check was vacuous.
    for dkey, fkey in tested_keys.items():
        val = attrs.get(dkey, None)
        if val is None:
            # Field not configured: it must not appear in the output at all.
            for line in pkg_lines:
                assert not line.startswith(fkey + ':')
        else:
            # Field configured: it must appear verbatim.
            line = f'{fkey}: {val}'
            assert line in pkg_lines_set
class TestParityWithMetadataFromPyPaWheel:
    """Ensure the metadata setuptools generates matches pypa/wheel's output."""

    def base_example(self):
        # A distribution with a representative, moderately complex config.
        attrs = dict(
            **EXAMPLE_BASE_INFO,
            # Example with complex requirement definition
            python_requires=">=3.8",
            install_requires="""
            packaging==23.2
            more-itertools==8.8.0; extra == "other"
            jaraco.text==3.7.0
            importlib-resources==5.10.2; python_version<"3.8"
            importlib-metadata==6.0.0 ; python_version<"3.8"
            colorama>=0.4.4; sys_platform == "win32"
            """,
            extras_require={
                "testing": """
                pytest >= 6
                pytest-checkdocs >= 2.4
                tomli ; \\
                        # Using stdlib when possible
                        python_version < "3.11"
                ini2toml[lite]>=0.9
                """,
                "other": [],
            },
        )
        # Generate a PKG-INFO file using setuptools
        return Distribution(attrs)

    def test_requires_dist(self, tmp_path):
        dist = self.base_example()
        pkg_info = _get_pkginfo(dist)
        assert _valid_metadata(pkg_info)

        # Ensure Requires-Dist is present
        expected = [
            'Metadata-Version:',
            'Requires-Python: >=3.8',
            'Provides-Extra: other',
            'Provides-Extra: testing',
            'Requires-Dist: tomli; python_version < "3.11" and extra == "testing"',
            'Requires-Dist: more-itertools==8.8.0; extra == "other"',
            'Requires-Dist: ini2toml[lite]>=0.9; extra == "testing"',
        ]
        for line in expected:
            assert line in pkg_info

    HERE = Path(__file__).parent
    EXAMPLES_FILE = HERE / "config/setupcfg_examples.txt"

    @pytest.fixture(params=[None, *urls_from_file(EXAMPLES_FILE)])
    def dist(self, request, monkeypatch, tmp_path):
        """Example of distribution with arbitrary configuration"""
        monkeypatch.chdir(tmp_path)
        # Stub out file/attr reads so downloaded example configs need no files:
        monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.42"))
        monkeypatch.setattr(expand, "read_files", Mock(return_value="hello world"))
        monkeypatch.setattr(
            Distribution, "_finalize_license_files", Mock(return_value=None)
        )
        if request.param is None:
            yield self.base_example()
        else:
            # Real-world usage
            config = retrieve_file(request.param)
            yield setupcfg.apply_configuration(Distribution({}), config)

    @pytest.mark.uses_network
    def test_equivalent_output(self, tmp_path, dist):
        """Ensure output from setuptools is equivalent to the one from `pypa/wheel`"""
        # Generate a METADATA file using pypa/wheel for comparison
        wheel_metadata = importlib.import_module("wheel.metadata")
        pkginfo_to_metadata = getattr(wheel_metadata, "pkginfo_to_metadata", None)
        if pkginfo_to_metadata is None:  # pragma: nocover
            pytest.xfail(
                "wheel.metadata.pkginfo_to_metadata is undefined, "
                "(this is likely to be caused by API changes in pypa/wheel"
            )

        # Generate a simplified "egg-info" dir for pypa/wheel to convert
        pkg_info = _get_pkginfo(dist)
        egg_info_dir = tmp_path / "pkg.egg-info"
        egg_info_dir.mkdir(parents=True)
        (egg_info_dir / "PKG-INFO").write_text(pkg_info, encoding="utf-8")
        write_requirements(egg_info(dist), egg_info_dir, egg_info_dir / "requires.txt")

        # Get pypa/wheel generated METADATA but normalize requirements formatting
        metadata_msg = pkginfo_to_metadata(egg_info_dir, egg_info_dir / "PKG-INFO")
        metadata_str = _normalize_metadata(metadata_msg)
        pkg_info_msg = message_from_string(pkg_info)
        pkg_info_str = _normalize_metadata(pkg_info_msg)

        # Compare setuptools PKG-INFO x pypa/wheel METADATA
        assert metadata_str == pkg_info_str

        # Make sure it parses/serializes well in pypa/wheel
        _assert_roundtrip_message(pkg_info)
class TestPEP643:
    """Tests for PEP 643: fields modified after reading a static config must
    be advertised via the ``Dynamic`` metadata field.
    """

    # Equivalent static project configurations in both supported formats.
    STATIC_CONFIG = {
        "setup.cfg": cleandoc(
            """
            [metadata]
            name = package
            version = 0.0.1
            author = Foo Bar
            author_email = foo@bar.net
            long_description = Long
                description
            description = Short description
            keywords = one, two
            platforms = abcd

            [options]
            install_requires = requests
            """
        ),
        "pyproject.toml": cleandoc(
            """
            [project]
            name = "package"
            version = "0.0.1"
            authors = [
                {name = "Foo Bar", email = "foo@bar.net"}
            ]
            description = "Short description"
            readme = {text = "Long\\ndescription", content-type = "text/plain"}
            keywords = ["one", "two"]
            dependencies = ["requests"]
            license = "AGPL-3.0-or-later"

            [tool.setuptools]
            provides = ["abcd"]
            obsoletes = ["abcd"]
            """
        ),
    }

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    def test_static_config_has_no_dynamic(self, file, tmpdir_cwd):
        # A fully static config must not produce any Dynamic field at all.
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        metadata = _get_metadata()
        assert metadata.get_all("Dynamic") is None
        assert metadata.get_all("dynamic") is None

    @pytest.mark.parametrize("file", STATIC_CONFIG.keys())
    @pytest.mark.parametrize(
        "fields",
        [
            # Single dynamic field
            {"requires-python": ("python_requires", ">=3.12")},
            {"author-email": ("author_email", "snoopy@peanuts.com")},
            {"keywords": ("keywords", ["hello", "world"])},
            {"platform": ("platforms", ["abcd"])},
            # Multiple dynamic fields
            {
                "summary": ("description", "hello world"),
                "description": ("long_description", "bla bla bla bla"),
                "requires-dist": ("install_requires", ["hello-world"]),
            },
        ],
    )
    def test_modified_fields_marked_as_dynamic(self, file, fields, tmpdir_cwd):
        # We start with a static config
        Path(file).write_text(self.STATIC_CONFIG[file], encoding="utf-8")
        dist = _makedist()

        # ... but then we simulate the effects of a plugin modifying the distribution
        for attr, value in fields.values():
            # `dist` and `dist.metadata` are complicated...
            # Some attributes work when set on `dist`, others on `dist.metadata`...
            # Here we set in both just in case (this also avoids calling `_finalize_*`)
            setattr(dist, attr, value)
            setattr(dist.metadata, attr, value)

        # Then we should be able to list the modified fields as Dynamic
        metadata = _get_metadata(dist)
        assert set(metadata.get_all("Dynamic")) == set(fields)

    @pytest.mark.parametrize(
        "extra_toml",
        [
            "# Let setuptools autofill license-files",
            "license-files = ['LICENSE*', 'AUTHORS*', 'NOTICE']",
        ],
    )
    def test_license_files_dynamic(self, extra_toml, tmpdir_cwd):
        # For simplicity (and for the time being) setuptools is not making
        # any special handling to guarantee `License-File` is considered static.
        # Instead we rely in the fact that, although suboptimal, it is OK to have
        # it as dynamics, as per:
        # https://github.com/pypa/setuptools/issues/4629#issuecomment-2331233677
        files = {
            "pyproject.toml": self.STATIC_CONFIG["pyproject.toml"].replace(
                'license = "AGPL-3.0-or-later"',
                f"dynamic = ['license']\n{extra_toml}",
            ),
            "LICENSE.md": "--- mock license ---",
            "NOTICE": "--- mock notice ---",
            "AUTHORS.txt": "--- me ---",
        }
        # Sanity checks:
        assert extra_toml in files["pyproject.toml"]
        assert 'license = "AGPL-3.0-or-later"' not in extra_toml

        jaraco.path.build(files)
        dist = _makedist(license_expression="AGPL-3.0-or-later")
        metadata = _get_metadata(dist)
        assert set(metadata.get_all("Dynamic")) == {
            'license-file',
            'license-expression',
        }
        assert metadata.get("License-Expression") == "AGPL-3.0-or-later"
        assert set(metadata.get_all("License-File")) == {
            "NOTICE",
            "AUTHORS.txt",
            "LICENSE.md",
        }
def _makedist(**attrs):
    """Create a Distribution from **attrs and load its config files."""
    distribution = Distribution(attrs)
    distribution.parse_config_files()
    return distribution
def _assert_roundtrip_message(metadata: str) -> None:
    """Emulate the way wheel.bdist_wheel parses and regenerates the message,
    then ensures the metadata generated by setuptools is compatible.
    """
    with io.StringIO(metadata) as source:
        parsed = Parser(EmailMessage).parse(source)

    # Same serialization settings wheel uses: UTF-8, no From-mangling, no wrap.
    policy = EmailPolicy(
        utf8=True,
        mangle_from_=False,
        max_line_length=0,
    )
    with io.BytesIO() as sink:
        writer = io.TextIOWrapper(sink, encoding="utf-8")
        Generator(writer, policy=policy).flatten(parsed)
        writer.flush()
        regenerated = sink.getvalue()

    original = bytes(metadata, "utf-8")
    # Normalise newlines to avoid test errors on Windows:
    assert b"\n".join(regenerated.splitlines()) == b"\n".join(original.splitlines())
def _normalize_metadata(msg: Message) -> str:
    """Allow equivalent metadata to be compared directly"""
    # The main challenge regards the requirements and extras.
    # Both setuptools and wheel already apply some level of normalization
    # but they differ regarding which character is chosen, according to the
    # following spec it should be "-":
    # https://packaging.python.org/en/latest/specifications/name-normalization/
    # Related issues:
    # https://github.com/pypa/packaging/issues/845
    # https://github.com/pypa/packaging/issues/644#issuecomment-2429813968

    extras = {value.replace("_", "-"): value for value in msg.get_all("Provides-Extra", [])}
    normalized_reqs = [
        _normalize_req(parsed, extras)
        for parsed in _reqs.parse(msg.get_all("Requires-Dist", []))
    ]

    del msg["Requires-Dist"]
    del msg["Provides-Extra"]

    # Re-insert both lists sorted, for a deterministic comparison.
    for requirement in sorted(normalized_reqs):
        msg["Requires-Dist"] = requirement
    for extra in sorted(extras):
        msg["Provides-Extra"] = extra

    # TODO: Handle lack of PEP 643 implementation in pypa/wheel?
    del msg["Metadata-Version"]
    return msg.as_string()
def _normalize_req(req: Requirement, extras: dict[str, str]) -> str:
    """Allow equivalent requirement objects to be compared directly"""
    # Normalize the project name ("_" -> "-") per the name-normalization spec.
    normalized = str(req).replace(req.name, req.name.replace("_", "-"))
    # Replace each original extra spelling with its normalized form.
    for canonical, original in extras.items():
        normalized = normalized.replace(original, canonical)
    return normalized
def _get_pkginfo(dist: Distribution):
    """Render the metadata of *dist* as a PKG-INFO string."""
    with io.StringIO() as stream:
        dist.metadata.write_pkg_file(stream)
        return stream.getvalue()
def _get_metadata(dist: Distribution | None = None):
    """Parse the PKG-INFO of *dist* (a fresh one by default) into a Message."""
    source = dist or _makedist()
    return message_from_string(_get_pkginfo(source))
def _valid_metadata(text: str) -> bool:
    """Return True when *text* parses as valid core metadata."""
    parsed = Metadata.from_email(text, validate=True)  # can raise exceptions
    return parsed is not None

View File

@@ -0,0 +1,15 @@
import sys
from setuptools import depends
class TestGetModuleConstant:
    def test_basic(self):
        """
        Invoke get_module_constant on a module in
        the test package.
        """
        mod_name = 'setuptools.tests.mod_with_constant'
        assert depends.get_module_constant(mod_name, 'value') == 'three, sir!'
        # Extracting the constant must not leave the module imported behind.
        assert mod_name not in sys.modules

View File

@@ -0,0 +1,112 @@
"""develop tests"""
import os
import platform
import subprocess
import sys
import pytest
from setuptools._path import paths_on_pythonpath
from . import contexts, namespaces
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['foo'],
)
"""
INIT_PY = """print "foo"
"""
@pytest.fixture
def temp_user(monkeypatch):
    """Point site.USER_BASE/USER_SITE at throw-away directories."""
    with contexts.tempdir() as user_base, contexts.tempdir() as user_site:
        monkeypatch.setattr('site.USER_BASE', user_base)
        monkeypatch.setattr('site.USER_SITE', user_site)
        yield
@pytest.fixture
def test_env(tmpdir, temp_user):
    """Create a minimal 'foo' project inside *tmpdir* and chdir into it."""
    target = tmpdir
    foo = target.mkdir('foo')
    setup_script = target / 'setup.py'
    # Guard against an unexpectedly pre-populated tmpdir.
    if setup_script.isfile():
        raise ValueError(dir(target))
    with setup_script.open('w') as stream:
        stream.write(SETUP_PY)
    with (foo / '__init__.py').open('w') as stream:
        stream.write(INIT_PY)
    with target.as_cwd():
        yield target
class TestNamespaces:
    @staticmethod
    def install_develop(src_dir, target):
        # Run `setup.py develop --install-dir target` from within src_dir,
        # with `target` on PYTHONPATH so the egg-link can be resolved.
        develop_cmd = [
            sys.executable,
            'setup.py',
            'develop',
            '--install-dir',
            str(target),
        ]
        with src_dir.as_cwd():
            with paths_on_pythonpath([str(target)]):
                subprocess.check_call(develop_cmd)

    @pytest.mark.skipif(
        bool(os.environ.get("APPVEYOR")),
        reason="https://github.com/pypa/setuptools/issues/851",
    )
    @pytest.mark.skipif(
        platform.python_implementation() == 'PyPy',
        reason="https://github.com/pypa/setuptools/issues/1202",
    )
    @pytest.mark.uses_network
    def test_namespace_package_importable(self, tmpdir):
        """
        Installing two packages sharing the same namespace, one installed
        naturally using pip or `--single-version-externally-managed`
        and the other installed using `develop` should leave the namespace
        intact and both packages reachable by import.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        self.install_develop(pkg_B, target)
        namespaces.make_site_dir(target)
        # Both halves of the namespace must now be importable together.
        try_import = [
            sys.executable,
            '-c',
            'import myns.pkgA; import myns.pkgB',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(try_import)
        # additionally ensure that pkg_resources import works
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp)

View File

@@ -0,0 +1,278 @@
import os
import re
import urllib.parse
import urllib.request
import pytest
from setuptools import Distribution
from setuptools.dist import check_package_data, check_specifier
from .fixtures import make_trivial_sdist
from .test_find_packages import ensure_files
from .textwrap import DALS
from distutils.errors import DistutilsSetupError
def test_dist_fetch_build_egg(tmpdir):
    """
    Check multiple calls to `Distribution.fetch_build_egg` work as expected.
    """
    index = tmpdir.mkdir('index')
    index_url = urllib.parse.urljoin('file://', urllib.request.pathname2url(str(index)))

    def sdist_with_index(distname, version):
        # Publish a trivial sdist for *distname* in the local file:// index.
        dist_dir = index.mkdir(distname)
        dist_sdist = f'{distname}-{version}.tar.gz'
        make_trivial_sdist(str(dist_dir.join(dist_sdist)), distname, version)
        with dist_dir.join('index.html').open('w') as fp:
            fp.write(
                DALS(
                    """
                    <!DOCTYPE html><html><body>
                    <a href="{dist_sdist}" rel="internal">{dist_sdist}</a><br/>
                    </body></html>
                    """
                ).format(dist_sdist=dist_sdist)
            )

    sdist_with_index('barbazquux', '3.2.0')
    sdist_with_index('barbazquux-runner', '2.11.1')
    # Point easy_install at the local index via setup.cfg.
    with tmpdir.join('setup.cfg').open('w') as fp:
        fp.write(
            DALS(
                """
                [easy_install]
                index_url = {index_url}
                """
            ).format(index_url=index_url)
        )
    reqs = """
    barbazquux-runner
    barbazquux
    """.split()
    with tmpdir.as_cwd():
        dist = Distribution()
        dist.parse_config_files()
        resolved_dists = [dist.fetch_build_egg(r) for r in reqs]
    # Both requirements must resolve, in the order requested.
    assert [dist.name for dist in resolved_dists if dist] == reqs
# Minimal set of distribution attributes shared by the tests in this module.
EXAMPLE_BASE_INFO = dict(
    name="package",
    version="0.0.1",
    author="Foo Bar",
    author_email="foo@bar.net",
    long_description="Long\ndescription",
    description="Short description",
    keywords=["one", "two"],
)
def test_provides_extras_deterministic_order():
    """provides_extras must preserve the declaration order of extras_require."""
    extras = dict(extras_require=dict(a=['foo'], b=['bar']))
    assert list(Distribution(extras).metadata.provides_extras) == ['a', 'b']
    # Reversing the declaration order must reverse the reported order.
    extras['extras_require'] = dict(reversed(extras['extras_require'].items()))
    assert list(Distribution(extras).metadata.provides_extras) == ['b', 'a']
# Each entry: (package_data argument, expected error-message fragment,
# or None when the value must validate cleanly).
CHECK_PACKAGE_DATA_TESTS = (
    # Valid.
    (
        {
            '': ['*.txt', '*.rst'],
            'hello': ['*.msg'],
        },
        None,
    ),
    # Not a dictionary.
    (
        (
            ('', ['*.txt', '*.rst']),
            ('hello', ['*.msg']),
        ),
        (
            "'package_data' must be a dictionary mapping package"
            " names to lists of string wildcard patterns"
        ),
    ),
    # Invalid key type.
    (
        {
            400: ['*.txt', '*.rst'],
        },
        ("keys of 'package_data' dict must be strings (got 400)"),
    ),
    # Invalid value type.
    (
        {
            'hello': '*.msg',
        },
        (
            "\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
            " (got '*.msg')"
        ),
    ),
    # Invalid value type (generators are single use)
    (
        {
            'hello': (x for x in "generator"),
        },
        (
            "\"values of 'package_data' dict\" must be of type <tuple[str, ...] | list[str]>"
            " (got <generator object"
        ),
    ),
)
@pytest.mark.parametrize(('package_data', 'expected_message'), CHECK_PACKAGE_DATA_TESTS)
def test_check_package_data(package_data, expected_message):
    """Valid values pass silently; invalid ones raise the documented error."""
    if expected_message is None:
        assert check_package_data(None, 'package_data', package_data) is None
    else:
        pattern = re.escape(expected_message)
        with pytest.raises(DistutilsSetupError, match=pattern):
            check_package_data(None, 'package_data', package_data)
def test_check_specifier():
    """check_specifier accepts valid python_requires values, rejects garbage."""
    # valid specifier value (string form)
    attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'}
    check_specifier(Distribution(attrs), attrs, attrs['python_requires'])

    # valid specifier value (list form)
    attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']}
    check_specifier(Distribution(attrs), attrs, attrs['python_requires'])

    # invalid specifier value raises while constructing the Distribution
    attrs = {'name': 'foo', 'python_requires': '>=invalid-version'}
    with pytest.raises(DistutilsSetupError):
        Distribution(attrs)
def test_metadata_name():
    """Metadata validation must reject a distribution that has no name."""
    with pytest.raises(DistutilsSetupError, match='missing.*name'):
        Distribution()._validate_metadata()
@pytest.mark.parametrize(
    ('dist_name', 'py_module'),
    [
        ("my.pkg", "my_pkg"),
        ("my-pkg", "my_pkg"),
        ("my_pkg", "my_pkg"),
        ("pkg", "pkg"),
    ],
)
def test_dist_default_py_modules(tmp_path, dist_name, py_module):
    """When omitted, `py_modules` is inferred from the project name."""
    # Common tool files (setup.py/noxfile.py) must be ignored by discovery.
    for script in (f"{py_module}.py", "setup.py", "noxfile.py"):
        (tmp_path / script).touch()

    attrs = {**EXAMPLE_BASE_INFO, "name": dist_name, "src_root": str(tmp_path)}

    # Discovery finds the module matching (the normalized) dist_name.
    dist = Distribution(attrs)
    dist.set_defaults()
    assert dist.py_modules == [py_module]

    # An explicit `py_modules` wins over discovery.
    dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]})
    dist.set_defaults()
    assert dist.py_modules == ["explicity_py_module"]

    # An explicit `packages` disables module discovery entirely.
    dist = Distribution({**attrs, "packages": ["explicity_package"]})
    dist.set_defaults()
    assert not dist.py_modules
@pytest.mark.parametrize(
    ('dist_name', 'package_dir', 'package_files', 'packages'),
    [
        ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
        ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
        (
            "my_pkg",
            None,
            ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
            ["my_pkg", "my_pkg2"],
        ),
        (
            "my_pkg",
            {"pkg": "lib", "pkg2": "lib2"},
            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
            ["pkg", "pkg.nested", "pkg2"],
        ),
    ],
)
def test_dist_default_packages(
    tmp_path, dist_name, package_dir, package_files, packages
):
    """When omitted, `packages` is discovered from the file layout."""
    ensure_files(tmp_path, package_files)
    (tmp_path / "setup.py").touch()
    (tmp_path / "noxfile.py").touch()
    # ^-- should not be included by default

    attrs = {
        **EXAMPLE_BASE_INFO,
        "name": dist_name,
        "src_root": str(tmp_path),
        "package_dir": package_dir,
    }

    # Find `packages` either corresponding to dist_name or inside src
    dist = Distribution(attrs)
    dist.set_defaults()
    # FIX: this assertion was accidentally duplicated on two consecutive lines.
    assert not dist.py_modules
    assert set(dist.packages) == set(packages)

    # When `py_modules` is given, don't do anything
    dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
    dist.set_defaults()
    assert not dist.packages
    assert set(dist.py_modules) == {"explicit_py_module"}

    # When `packages` is given, don't do anything
    dist = Distribution({**attrs, "packages": ["explicit_package"]})
    dist.set_defaults()
    assert not dist.py_modules
    assert set(dist.packages) == {"explicit_package"}
@pytest.mark.parametrize(
    ('dist_name', 'package_dir', 'package_files'),
    [
        ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
        ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
        ("my_pkg", None, ["my_pkg.py"]),
        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
        ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
        (
            "my_pkg",
            {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
        ),
        # Should not try to guess a name from multiple py_modules/packages
        ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
        ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
    ],
)
def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
    """Make sure dist.name is discovered from packages/py_modules"""
    ensure_files(tmp_path, package_files)
    attrs = {
        **EXAMPLE_BASE_INFO,
        "src_root": "/".join(os.path.split(tmp_path)),  # POSIX-style
        "package_dir": package_dir,
    }
    del attrs["name"]

    dist = Distribution(attrs)
    dist.set_defaults()
    # Discovery must have found something before a name can be derived.
    assert dist.py_modules or dist.packages
    assert dist.get_name() == dist_name

View File

@@ -0,0 +1,147 @@
"""Test .dist-info style distributions."""
import pathlib
import re
import shutil
import subprocess
import sys
from functools import partial
import pytest
from setuptools.archive_util import unpack_archive
from .textwrap import DALS
# Shortcut: read a pathlib.Path as UTF-8 text.
read = partial(pathlib.Path.read_text, encoding="utf-8")
class TestDistInfo:
    def test_invalid_version(self, tmp_path):
        """
        Supplying an invalid version crashes dist_info.
        """
        config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n"
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
        msg = re.compile("invalid version", re.M | re.I)
        proc = run_command_inner("dist_info", cwd=tmp_path, check=False)
        # Command must fail, report the problem, and create no .dist-info dir.
        assert proc.returncode
        assert msg.search(proc.stdout)
        assert not list(tmp_path.glob("*.dist-info"))

    def test_tag_arguments(self, tmp_path):
        # NOTE(review): config must stay left-aligned — ConfigParser rejects
        # indented section headers.
        config = """
[metadata]
name=proj
version=42
[egg_info]
tag_date=1
tag_build=.post
"""
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")

        # --no-date overrides tag_date from setup.cfg.
        print(run_command("dist_info", "--no-date", cwd=tmp_path))
        dist_info = next(tmp_path.glob("*.dist-info"))
        assert dist_info.name.startswith("proj-42")
        shutil.rmtree(dist_info)

        # --tag-build overrides tag_build from setup.cfg.
        print(run_command("dist_info", "--tag-build", ".a", cwd=tmp_path))
        dist_info = next(tmp_path.glob("*.dist-info"))
        assert dist_info.name.startswith("proj-42a")

    @pytest.mark.parametrize("keep_egg_info", (False, True))
    def test_output_dir(self, tmp_path, keep_egg_info):
        config = "[metadata]\nname=proj\nversion=42\n"
        (tmp_path / "setup.cfg").write_text(config, encoding="utf-8")
        out = tmp_path / "__out"
        out.mkdir()
        opts = ["--keep-egg-info"] if keep_egg_info else []
        run_command("dist_info", "--output-dir", out, *opts, cwd=tmp_path)
        # dist-info lands only in the requested output dir ...
        assert len(list(out.glob("*.dist-info"))) == 1
        assert len(list(tmp_path.glob("*.dist-info"))) == 0
        # ... the intermediate egg-info survives only with --keep-egg-info ...
        expected_egg_info = int(keep_egg_info)
        assert len(list(out.glob("*.egg-info"))) == expected_egg_info
        assert len(list(tmp_path.glob("*.egg-info"))) == 0
        # ... and no backup directories are left behind anywhere.
        assert len(list(out.glob("*.__bkp__"))) == 0
        assert len(list(tmp_path.glob("*.__bkp__"))) == 0
class TestWheelCompatibility:
    """Make sure the .dist-info directory produced with the ``dist_info`` command
    is the same as the one produced by ``bdist_wheel``.
    """

    # Template setup.cfg exercising requirements, extras and entry points.
    SETUPCFG = DALS(
        """
        [metadata]
        name = {name}
        version = {version}

        [options]
        install_requires =
            foo>=12; sys_platform != "linux"

        [options.extras_require]
        test = pytest

        [options.entry_points]
        console_scripts =
            executable-name = my_package.module:function
        discover =
            myproj = my_package.other_module:function
        """
    )

    # (expected version suffix, extra [egg_info] config) pairs.
    EGG_INFO_OPTS = [
        # Related: #3088 #2872
        ("", ""),
        (".post", "[egg_info]\ntag_build = post\n"),
        (".post", "[egg_info]\ntag_build = .post\n"),
        (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"),
        (".dev", "[egg_info]\ntag_build = .dev\n"),
        (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"),
        ("a1", "[egg_info]\ntag_build = .a1\n"),
        ("+local", "[egg_info]\ntag_build = +local\n"),
    ]

    @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split())
    @pytest.mark.parametrize("version", ["0.42.13"])
    @pytest.mark.parametrize(("suffix", "cfg"), EGG_INFO_OPTS)
    def test_dist_info_is_the_same_as_in_wheel(
        self, name, version, tmp_path, suffix, cfg
    ):
        config = self.SETUPCFG.format(name=name, version=version) + cfg

        # Build the same project twice: once via bdist_wheel, once via dist_info.
        for i in "dir_wheel", "dir_dist":
            (tmp_path / i).mkdir()
            (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8")

        run_command("bdist_wheel", cwd=tmp_path / "dir_wheel")
        wheel = next(tmp_path.glob("dir_wheel/dist/*.whl"))
        unpack_archive(wheel, tmp_path / "unpack")
        wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info"))

        run_command("dist_info", cwd=tmp_path / "dir_dist")
        dist_info = next(tmp_path.glob("dir_dist/*.dist-info"))

        # Same directory name (normalized project name + tagged version) ...
        assert dist_info.name == wheel_dist_info.name
        assert dist_info.name.startswith(f"my_proj-{version}{suffix}")
        # ... and identical key files.
        for file in "METADATA", "entry_points.txt":
            assert read(dist_info / file) == read(wheel_dist_info / file)
def run_command_inner(*cmd, **kwargs):
    """Run a `setup.py`-style command in a subprocess, capturing text output.

    Keyword arguments override the capture/check defaults (e.g. check=False).
    """
    run_opts = dict(
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        text=True,
        encoding="utf-8",
        check=True,
    )
    run_opts.update(kwargs)
    argv = [sys.executable, "-c", "__import__('setuptools').setup()", *map(str, cmd)]
    return subprocess.run(argv, **run_opts)
def run_command(*args, **kwargs):
    """Like run_command_inner, but return only the captured stdout."""
    completed = run_command_inner(*args, **kwargs)
    return completed.stdout

View File

@@ -0,0 +1,198 @@
import os
import platform
import sys
import textwrap
import pytest
# True when running under PyPy (some distutils behaviors differ there).
IS_PYPY = '__pypy__' in sys.builtin_module_names

# Shared keyword arguments telling subprocess.run to decode output as UTF-8.
_TEXT_KWARGS = {"text": True, "encoding": "utf-8"}  # For subprocess.run
def win_sr(env):
    """
    On Windows, SYSTEMROOT must be present to avoid
    > Fatal Python error: _Py_HashRandomization_Init: failed to
    > get random numbers to initialize Python
    """
    needs_systemroot = bool(env) and platform.system() == 'Windows'
    if needs_systemroot:
        env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
    return env
def find_distutils(venv, imports='distutils', env=None, **kwargs):
    """Report distutils.__file__ as seen inside *venv* after importing *imports*."""
    probe = 'import {imports}; print(distutils.__file__)'.format(imports=imports)
    return venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS, **kwargs)
def count_meta_path(venv, env=None):
    """Count DistutilsMetaFinder instances on sys.meta_path inside *venv*."""
    probe = textwrap.dedent(
        """
        import sys
        is_distutils = lambda finder: finder.__class__.__name__ == "DistutilsMetaFinder"
        print(len(list(filter(is_distutils, sys.meta_path))))
        """
    )
    output = venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS)
    return int(output)
# Marker for tests needing the stdlib copy of distutils
# (removed from the standard library in Python 3.12 by PEP 632).
skip_without_stdlib_distutils = pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason='stdlib distutils is removed from Python 3.12+',
)
@skip_without_stdlib_distutils
def test_distutils_stdlib(venv):
    """
    Ensure stdlib distutils is used when appropriate.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib')
    location = find_distutils(venv, env=env)
    # Resolved from outside the venv => the stdlib copy is in use.
    assert venv.name not in location.split(os.sep)
    assert count_meta_path(venv, env=env) == 0
def test_distutils_local_with_setuptools(venv):
    """
    Ensure local distutils is used when appropriate.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='local')
    location = find_distutils(venv, imports='setuptools, distutils', env=env)
    # Resolved from inside the venv => the setuptools-local copy is in use.
    assert venv.name in location.split(os.sep)
    assert count_meta_path(venv, env=env) <= 1
@pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup')
def test_distutils_local(venv):
    """
    Even without importing, the setuptools-local copy of distutils is
    preferred.
    """
    env = dict(SETUPTOOLS_USE_DISTUTILS='local')
    location = find_distutils(venv, env=env)
    assert venv.name in location.split(os.sep)
    assert count_meta_path(venv, env=env) <= 1
def test_pip_import(venv):
    """
    Ensure pip can be imported.
    Regression test for #3002.
    """
    venv.run(['python', '-c', 'import pip'], **_TEXT_KWARGS)
def test_distutils_has_origin():
    """
    Distutils module spec should have an origin. #2990.
    """
    distutils = __import__('distutils')
    assert distutils.__spec__.origin
# Probe script run inside a venv: verifies that distutils submodules are not
# imported twice as distinct module objects. `{imported_module}` is filled in
# by the test via str.format; `{{...}}` escapes literal braces for the
# embedded f-string.
ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r"""
# Depending on the importlib machinery and _distutils_hack, some imports are
# duplicated resulting in different module objects being loaded, which prevents
# patches as shown in #3042.
# This script provides a way of verifying if this duplication is happening.

from distutils import cmd
import distutils.command.sdist as sdist

# import last to prevent caching
from distutils import {imported_module}

for mod in (cmd, sdist):
    assert mod.{imported_module} == {imported_module}, (
        f"\n{{mod.{imported_module}}}\n!=\n{{{imported_module}}}"
    )

print("success")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    ('distutils_version', 'imported_module'),
    [
        pytest.param("stdlib", "dir_util", marks=skip_without_stdlib_distutils),
        pytest.param("stdlib", "file_util", marks=skip_without_stdlib_distutils),
        pytest.param("stdlib", "archive_util", marks=skip_without_stdlib_distutils),
        ("local", "dir_util"),
        ("local", "file_util"),
        ("local", "archive_util"),
    ],
)
def test_modules_are_not_duplicated_on_import(distutils_version, imported_module, venv):
    """Run the duplication probe inside *venv* for the selected distutils flavour."""
    probe = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module)
    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
    result = venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS)
    assert result.strip() == "success"
ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r"""
import types
import distutils.dist as dist
from distutils import log
if isinstance(dist.log, types.ModuleType):
assert dist.log == log, f"\n{dist.log}\n!=\n{log}"
print("success")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    "distutils_version",
    [
        "local",
        pytest.param("stdlib", marks=skip_without_stdlib_distutils),
    ],
)
def test_log_module_is_not_duplicated_on_import(distutils_version, venv):
    """The legacy distutils `log` module must be a single object inside *venv*."""
    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
    result = venv.run(
        ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED],
        env=win_sr(env),
        **_TEXT_KWARGS,
    )
    assert result.strip() == "success"
ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY = r"""
from setuptools.modified import newer
from {imported_module}.errors import DistutilsError
# Can't use pytest.raises in this context
try:
newer("", "")
except DistutilsError:
print("success")
else:
raise AssertionError("Expected to raise")
"""
@pytest.mark.usefixtures("tmpdir_cwd")
@pytest.mark.parametrize(
    ('distutils_version', 'imported_module'),
    [
        ("local", "distutils"),
        # Unfortunately we still get ._distutils.errors.DistutilsError with SETUPTOOLS_USE_DISTUTILS=stdlib
        # But that's a deprecated use-case we don't mind not fully supporting in newer code
        pytest.param(
            "stdlib", "setuptools._distutils", marks=skip_without_stdlib_distutils
        ),
    ],
)
def test_consistent_error_from_modified_py(distutils_version, imported_module, venv):
    """setuptools.modified must raise the expected DistutilsError flavour."""
    probe = ENSURE_CONSISTENT_ERROR_FROM_MODIFIED_PY.format(
        imported_module=imported_module
    )
    env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version)
    result = venv.run(['python', '-c', probe], env=win_sr(env), **_TEXT_KWARGS)
    assert result.strip() == "success"

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,15 @@
import importlib
import pickle
import packaging
from setuptools import Distribution
def test_reimport_extern():
    """Re-importing ``packaging`` by name must yield the same module object."""
    reimported = importlib.import_module(packaging.__name__)
    assert reimported is packaging
def test_distribution_picklable():
    """A default ``Distribution`` must survive a pickle round-trip."""
    payload = pickle.dumps(Distribution())
    pickle.loads(payload)

View File

@@ -0,0 +1,218 @@
"""Tests for automatic package discovery"""
import os
import shutil
import tempfile
import pytest
from setuptools import find_namespace_packages, find_packages
from setuptools.discovery import FlatLayoutPackageFinder
from .compat.py39 import os_helper
class TestFindPackages:
    """Tests for ``find_packages``/``find_namespace_packages`` against a
    package tree built on disk in a temporary directory."""

    def setup_method(self, method):
        # Fresh scratch directory per test; removed again in teardown_method.
        self.dist_dir = tempfile.mkdtemp()
        self._make_pkg_structure()

    def teardown_method(self, method):
        shutil.rmtree(self.dist_dir)

    def _make_pkg_structure(self):
        """Make basic package structure.

        dist/
            docs/
                conf.py
            pkg/
                __pycache__/
                nspkg/
                    mod.py
                subpkg/
                    assets/
                        asset
                    __init__.py
            setup.py
        """
        self.docs_dir = self._mkdir('docs', self.dist_dir)
        self._touch('conf.py', self.docs_dir)
        self.pkg_dir = self._mkdir('pkg', self.dist_dir)
        self._mkdir('__pycache__', self.pkg_dir)
        self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
        self._touch('mod.py', self.ns_pkg_dir)
        self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
        self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
        self._touch('asset', self.asset_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        self._touch('setup.py', self.dist_dir)

    def _mkdir(self, path, parent_dir=None):
        # Create a directory (optionally under parent_dir) and return its path.
        if parent_dir:
            path = os.path.join(parent_dir, path)
        os.mkdir(path)
        return path

    def _touch(self, path, dir_=None):
        # Create an empty file (optionally under dir_) and return its path.
        if dir_:
            path = os.path.join(dir_, path)
        open(path, 'wb').close()
        return path

    def test_regular_package(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir)
        assert packages == ['pkg', 'pkg.subpkg']

    def test_exclude(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_packages(self.dist_dir, exclude=('pkg.*',))
        assert packages == ['pkg']

    def test_exclude_recursive(self):
        """
        Excluding a parent package should not exclude child packages as well.
        """
        self._touch('__init__.py', self.pkg_dir)
        self._touch('__init__.py', self.sub_pkg_dir)
        packages = find_packages(self.dist_dir, exclude=('pkg',))
        assert packages == ['pkg.subpkg']

    def test_include_excludes_other(self):
        """
        If include is specified, other packages should be excluded.
        """
        self._touch('__init__.py', self.pkg_dir)
        alt_dir = self._mkdir('other_pkg', self.dist_dir)
        self._touch('__init__.py', alt_dir)
        packages = find_packages(self.dist_dir, include=['other_pkg'])
        assert packages == ['other_pkg']

    def test_dir_with_dot_is_skipped(self):
        # A directory name containing a dot cannot be a package name.
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        data_dir = self._mkdir('some.data', self.pkg_dir)
        self._touch('__init__.py', data_dir)
        self._touch('file.dat', data_dir)
        packages = find_packages(self.dist_dir)
        assert 'pkg.some.data' not in packages

    def test_dir_with_packages_in_subdir_is_excluded(self):
        """
        Ensure that a package in a non-package such as build/pkg/__init__.py
        is excluded.
        """
        build_dir = self._mkdir('build', self.dist_dir)
        build_pkg_dir = self._mkdir('pkg', build_dir)
        self._touch('__init__.py', build_pkg_dir)
        packages = find_packages(self.dist_dir)
        assert 'build.pkg' not in packages

    @pytest.mark.skipif(not os_helper.can_symlink(), reason='Symlink support required')
    def test_symlinked_packages_are_included(self):
        """
        A symbolically-linked directory should be treated like any other
        directory when matched as a package.

        Create a link from lpkg -> pkg.
        """
        self._touch('__init__.py', self.pkg_dir)
        linked_pkg = os.path.join(self.dist_dir, 'lpkg')
        os.symlink('pkg', linked_pkg)
        assert os.path.isdir(linked_pkg)
        packages = find_packages(self.dist_dir)
        assert 'lpkg' in packages

    def _assert_packages(self, actual, expected):
        # Order-insensitive comparison of discovered package names.
        assert set(actual) == set(expected)

    def test_pep420_ns_package(self):
        packages = find_namespace_packages(
            self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']
        )
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_includes(self):
        packages = find_namespace_packages(self.dist_dir, exclude=['pkg.subpkg.assets'])
        self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_includes_or_excludes(self):
        packages = find_namespace_packages(self.dist_dir)
        expected = ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
        self._assert_packages(packages, expected)

    def test_regular_package_with_nested_pep420_ns_packages(self):
        self._touch('__init__.py', self.pkg_dir)
        packages = find_namespace_packages(
            self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']
        )
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])

    def test_pep420_ns_package_no_non_package_dirs(self):
        shutil.rmtree(self.docs_dir)
        shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
        packages = find_namespace_packages(self.dist_dir)
        self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
class TestFlatLayoutPackageFinder:
    """Package discovery for "flat" project layouts: common non-package
    top-level directories must be skipped."""

    # Mapping: scenario name -> (files to create, packages expected to be found).
    EXAMPLES = {
        "hidden-folders": (
            [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"],
            ["pkg", "pkg.nested"],
        ),
        "private-packages": (
            ["_pkg/__init__.py", "pkg/_private/__init__.py"],
            ["pkg", "pkg._private"],
        ),
        "invalid-name": (
            ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"],
            [],
        ),
        "docs": (["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"], ["pkg"]),
        "tests": (
            ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"],
            ["pkg"],
        ),
        "examples": (
            [
                "pkg/__init__.py",
                "examples/__init__.py",
                "examples/file.py",
                "example/other_file.py",
                # Sub-packages should always be fine
                "pkg/example/__init__.py",
                "pkg/examples/__init__.py",
            ],
            ["pkg", "pkg.examples", "pkg.example"],
        ),
        "tool-specific": (
            [
                "htmlcov/index.html",
                "pkg/__init__.py",
                "tasks/__init__.py",
                "tasks/subpackage/__init__.py",
                "fabfile/__init__.py",
                "fabfile/subpackage/__init__.py",
                # Sub-packages should always be fine
                "pkg/tasks/__init__.py",
                "pkg/fabfile/__init__.py",
            ],
            ["pkg", "pkg.tasks", "pkg.fabfile"],
        ),
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_unwanted_directories_not_included(self, tmp_path, example):
        files, expected_packages = self.EXAMPLES[example]
        ensure_files(tmp_path, files)
        found_packages = FlatLayoutPackageFinder.find(str(tmp_path))
        assert set(found_packages) == set(expected_packages)
def ensure_files(root_path, files):
    """Create every (possibly nested) empty file listed in *files* under
    *root_path*, creating intermediate directories as needed."""
    for relative in files:
        target = root_path / relative
        target.parent.mkdir(parents=True, exist_ok=True)
        target.touch()

View File

@@ -0,0 +1,73 @@
"""Tests for automatic discovery of modules"""
import os
import pytest
from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder
from .compat.py39 import os_helper
from .test_find_packages import ensure_files
class TestModuleFinder:
    """Discovery of single-file top-level modules via ``ModuleFinder``."""

    def find(self, path, *args, **kwargs):
        # Thin wrapper: run the finder and return results as a set for
        # order-insensitive comparisons.
        return set(ModuleFinder.find(str(path), *args, **kwargs))

    EXAMPLES = {
        # circumstance: (files, kwargs, expected_modules)
        "simple_folder": (
            ["file.py", "other.py"],
            {},  # kwargs
            ["file", "other"],
        ),
        "exclude": (
            ["file.py", "other.py"],
            {"exclude": ["f*"]},
            ["other"],
        ),
        "include": (
            ["file.py", "fole.py", "other.py"],
            {"include": ["f*"], "exclude": ["fo*"]},
            ["file"],
        ),
        "invalid-name": (["my-file.py", "other.file.py"], {}, []),
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_finder(self, tmp_path, example):
        files, kwargs, expected_modules = self.EXAMPLES[example]
        ensure_files(tmp_path, files)
        assert self.find(tmp_path, **kwargs) == set(expected_modules)

    @pytest.mark.skipif(not os_helper.can_symlink(), reason='Symlink support required')
    def test_symlinked_packages_are_included(self, tmp_path):
        # A symlink to a module file should be discovered under the link name.
        src = "_myfiles/file.py"
        ensure_files(tmp_path, [src])
        os.symlink(tmp_path / src, tmp_path / "link.py")
        assert self.find(tmp_path) == {"link"}
class TestFlatLayoutModuleFinder:
    """Module discovery for "flat" layouts: well-known tool/config modules
    must be ignored."""

    def find(self, path, *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted for signature parity with
        # TestModuleFinder.find but are ignored here.
        return set(FlatLayoutModuleFinder.find(str(path)))

    EXAMPLES = {
        # circumstance: (files, expected_modules)
        "hidden-files": ([".module.py"], []),
        "private-modules": (["_module.py"], []),
        "common-names": (
            ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"],
            ["mod"],
        ),
        "tool-specific": (
            ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"],
            ["mod"],
        ),
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_unwanted_files_not_included(self, tmp_path, example):
        files, expected_modules = self.EXAMPLES[example]
        ensure_files(tmp_path, files)
        assert self.find(tmp_path) == set(expected_modules)

View File

@@ -0,0 +1,45 @@
import pytest
from jaraco import path
from setuptools.glob import glob
@pytest.mark.parametrize(
    ('tree', 'pattern', 'matches'),
    (
        ('', b'', []),
        ('', '', []),
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            '*.rst',
            ('CHANGES.rst', 'README.rst'),
        ),
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            b'*.rst',
            (b'CHANGES.rst', b'README.rst'),
        ),
    ),
)
def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
    """``setuptools.glob.glob`` finds the expected names for both ``str``
    and ``bytes`` patterns."""
    monkeypatch.chdir(tmpdir)
    # Materialize the whitespace-separated file tree as empty files.
    path.build({name: '' for name in tree.split()})
    assert sorted(glob(pattern)) == sorted(matches)

View File

@@ -0,0 +1,89 @@
"""install_scripts tests"""
import sys
import pytest
from setuptools.command.install_scripts import install_scripts
from setuptools.dist import Distribution
from . import contexts
class TestInstallScripts:
    """Shebang-line quoting behavior of the ``install_scripts`` command."""

    # Minimal distribution exposing a single console script named 'foo'.
    settings = dict(
        name='foo',
        entry_points={'console_scripts': ['foo=foo:foo']},
        version='0.0',
    )
    unix_exe = '/usr/dummy-test-path/local/bin/python'
    unix_spaces_exe = '/usr/bin/env dummy-test-python'
    win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.6\\python.exe'

    def _run_install_scripts(self, install_dir, executable=None):
        # Build and run the command; when *executable* is given, force it as
        # the interpreter used by build_scripts for the shebang.
        dist = Distribution(self.settings)
        dist.script_name = 'setup.py'
        cmd = install_scripts(dist)
        cmd.install_dir = install_dir
        if executable is not None:
            bs = cmd.get_finalized_command('build_scripts')
            bs.executable = executable
        cmd.ensure_finalized()
        with contexts.quiet():
            cmd.run()

    @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
    def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch):
        """
        Ensure that shebang is not quoted on Unix when getting the Python exe
        from sys.executable.
        """
        expected = f'#!{self.unix_exe}\n'
        monkeypatch.setattr('sys.executable', self.unix_exe)
        with tmpdir.as_cwd():
            self._run_install_scripts(str(tmpdir))
            with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f:
                actual = f.readline()
        assert actual == expected

    @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
    def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch):
        """
        Ensure that shebang is quoted on Windows when getting the Python exe
        from sys.executable and it contains a space.
        """
        expected = f'#!"{self.win32_exe}"\n'
        monkeypatch.setattr('sys.executable', self.win32_exe)
        with tmpdir.as_cwd():
            self._run_install_scripts(str(tmpdir))
            with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f:
                actual = f.readline()
        assert actual == expected

    @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
    def test_executable_with_spaces_escaping_unix(self, tmpdir):
        """
        Ensure that shebang on Unix is not quoted, even when
        a value with spaces
        is specified using --executable.
        """
        expected = f'#!{self.unix_spaces_exe}\n'
        with tmpdir.as_cwd():
            self._run_install_scripts(str(tmpdir), self.unix_spaces_exe)
            with open(str(tmpdir.join('foo')), 'r', encoding="utf-8") as f:
                actual = f.readline()
        assert actual == expected

    @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
    def test_executable_arg_escaping_win32(self, tmpdir):
        """
        Ensure that shebang on Windows is quoted when
        getting a path with spaces
        from --executable, that is itself properly quoted.
        """
        expected = f'#!"{self.win32_exe}"\n'
        with tmpdir.as_cwd():
            self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"')
            with open(str(tmpdir.join('foo-script.py')), 'r', encoding="utf-8") as f:
                actual = f.readline()
        assert actual == expected

View File

@@ -0,0 +1,76 @@
import functools
import inspect
import logging
import sys
import pytest
IS_PYPY = '__pypy__' in sys.builtin_module_names
setup_py = """\
from setuptools import setup
setup(
name="test_logging",
version="0.0"
)
"""
@pytest.mark.parametrize(
    ('flag', 'expected_level'), [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
)
def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
    """Make sure the correct verbosity level is set (issue #3038)"""
    import setuptools  # noqa: F401 # import setuptools to monkeypatch distutils
    import distutils  # <- load distutils after all the patches take place

    # Swap in a pristine root logger so the effective level starts at NOTSET.
    fresh_root = logging.Logger(__name__)
    monkeypatch.setattr(logging, "root", fresh_root)
    assert logging.getLevelName(fresh_root.getEffectiveLevel()) == "NOTSET"

    script = tmp_path / "setup.py"
    script.write_text(setup_py, encoding="utf-8")
    dist = distutils.core.run_setup(script, stop_after="init")
    dist.script_args = [flag, "sdist"]
    dist.parse_command_line()  # <- where the log level is set
    assert logging.getLevelName(fresh_root.getEffectiveLevel()) == expected_level
def flaky_on_pypy(func):
    """Decorator: convert assertion failures into xfails on PyPy (#4124).

    On CPython the original ``AssertionError`` propagates unchanged; on PyPy
    the test is xfailed because monkeypatching in these tests is known to be
    unreliable there (see discussion in #3707, #3709).
    """

    @functools.wraps(func)
    def _func(*args, **kwargs):
        # Forward arguments and the return value so decorated tests may use
        # pytest fixtures (the previous wrapper only supported zero-arg tests).
        try:
            return func(*args, **kwargs)
        except AssertionError:  # pragma: no cover
            if IS_PYPY:
                msg = "Flaky monkeypatch on PyPy (#4124)"
                pytest.xfail(f"{msg}. Original discussion in #3707, #3709.")
            raise

    return _func
@flaky_on_pypy
def test_patching_does_not_cause_problems():
    """``dist.log`` should only be patched when legacy distutils is in use."""
    # Ensure `dist.log` is only patched if necessary
    import _distutils_hack
    import setuptools.logging
    from distutils import dist

    setuptools.logging.configure()

    if not _distutils_hack.enabled():
        # Legacy stdlib distutils: dist.log remains a module.
        assert inspect.ismodule(dist.log)
        return

    # Modern logging infra, no problematic patching.
    assert dist.__file__ is None or "setuptools" in dist.__file__
    assert isinstance(dist.log, logging.Logger)

View File

@@ -0,0 +1,622 @@
"""sdist tests"""
from __future__ import annotations
import contextlib
import io
import itertools
import logging
import os
import shutil
import sys
import tempfile
import pytest
from setuptools.command.egg_info import FileList, egg_info, translate_pattern
from setuptools.dist import Distribution
from setuptools.tests.textwrap import DALS
from distutils import log
from distutils.errors import DistutilsTemplateError
IS_PYPY = '__pypy__' in sys.builtin_module_names
def make_local_path(s):
    """Converts '/' in a string to os.sep"""
    return os.sep.join(s.split('/'))
SETUP_ATTRS = {
'name': 'app',
'version': '0.0',
'packages': ['app'],
}
SETUP_PY = f"""\
from setuptools import setup
setup(**{SETUP_ATTRS!r})
"""
@contextlib.contextmanager
def quiet():
    """Silence stdout/stderr inside the block; both are restored on exit."""
    with contextlib.redirect_stdout(io.StringIO()):
        with contextlib.redirect_stderr(io.StringIO()):
            yield
def touch(filename):
    """Create an empty file at *filename* (truncating any existing content)."""
    with open(filename, 'wb'):
        pass
# The set of files always in the manifest, including all files in the
# .egg-info directory.  Paths use os.sep so comparisons work on Windows.
default_files = frozenset(
    map(
        make_local_path,
        [
            'README.rst',
            'MANIFEST.in',
            'setup.py',
            'app.egg-info/PKG-INFO',
            'app.egg-info/SOURCES.txt',
            'app.egg-info/dependency_links.txt',
            'app.egg-info/top_level.txt',
            'app/__init__.py',
        ],
    )
)
# Each entry: (pattern, paths that must match, paths that must not match).
translate_specs: list[tuple[str, list[str], list[str]]] = [
    ('foo', ['foo'], ['bar', 'foobar']),
    ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']),
    # Glob matching
    ('*.txt', ['foo.txt', 'bar.txt'], ['foo/foo.txt']),
    ('dir/*.txt', ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
    ('*/*.py', ['bin/start.py'], []),
    ('docs/page-?.txt', ['docs/page-9.txt'], ['docs/page-10.txt']),
    # Globstars change what they mean depending upon where they are
    (
        'foo/**/bar',
        ['foo/bing/bar', 'foo/bing/bang/bar', 'foo/bar'],
        ['foo/abar'],
    ),
    (
        'foo/**',
        ['foo/bar/bing.py', 'foo/x'],
        ['/foo/x'],
    ),
    (
        '**',
        ['x', 'abc/xyz', '@nything'],
        [],
    ),
    # Character classes
    (
        'pre[one]post',
        ['preopost', 'prenpost', 'preepost'],
        ['prepost', 'preonepost'],
    ),
    (
        'hello[!one]world',
        ['helloxworld', 'helloyworld'],
        ['hellooworld', 'helloworld', 'hellooneworld'],
    ),
    (
        '[]one].txt',
        ['o.txt', '].txt', 'e.txt'],
        ['one].txt'],
    ),
    (
        'foo[!]one]bar',
        ['fooybar'],
        ['foo]bar', 'fooobar', 'fooebar'],
    ),
]
"""
A spec of inputs for 'translate_pattern' and matches and mismatches
for that input.
"""
# Flatten translate_specs into individual (pattern, matching_path) pairs so
# each pair becomes its own fixture parameterization.
match_params = itertools.chain.from_iterable(
    zip(itertools.repeat(pattern), matches)
    for pattern, matches, mismatches in translate_specs
)


@pytest.fixture(params=match_params)
def pattern_match(request):
    # Normalize '/' to os.sep in both pattern and target path.
    return map(make_local_path, request.param)


# Likewise for (pattern, non_matching_path) pairs.
mismatch_params = itertools.chain.from_iterable(
    zip(itertools.repeat(pattern), mismatches)
    for pattern, matches, mismatches in translate_specs
)


@pytest.fixture(params=mismatch_params)
def pattern_mismatch(request):
    return map(make_local_path, request.param)
def test_translated_pattern_match(pattern_match):
    """Every (pattern, path) pair from the match spec must match."""
    pattern, target = pattern_match
    regex = translate_pattern(pattern)
    assert regex.match(target)
def test_translated_pattern_mismatch(pattern_mismatch):
    """Every (pattern, path) pair from the mismatch spec must NOT match."""
    pattern, target = pattern_mismatch
    regex = translate_pattern(pattern)
    assert not regex.match(target)
class TempDirTestCase:
    """Base class: run each test method inside a private temporary cwd."""

    def setup_method(self, method):
        # Remember where we were, then move into a fresh scratch directory.
        self.old_cwd = os.getcwd()
        self.temp_dir = tempfile.mkdtemp()
        os.chdir(self.temp_dir)

    def teardown_method(self, method):
        # Restore the original cwd before deleting the scratch directory.
        os.chdir(self.old_cwd)
        shutil.rmtree(self.temp_dir)
class TestManifestTest(TempDirTestCase):
    """Exercise MANIFEST.in template handling via the egg_info command,
    operating on a file tree built in a temporary cwd."""

    def setup_method(self, method):
        super().setup_method(method)

        f = open(os.path.join(self.temp_dir, 'setup.py'), 'w', encoding="utf-8")
        f.write(SETUP_PY)
        f.close()
        """
        Create a file tree like:
        - LICENSE
        - README.rst
        - testing.rst
        - .hidden.rst
        - app/
            - __init__.py
            - a.txt
            - b.txt
            - c.rst
            - static/
                - app.js
                - app.js.map
                - app.css
                - app.css.map
        """
        for fname in ['README.rst', '.hidden.rst', 'testing.rst', 'LICENSE']:
            touch(os.path.join(self.temp_dir, fname))

        # Set up the rest of the test package
        test_pkg = os.path.join(self.temp_dir, 'app')
        os.mkdir(test_pkg)
        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
            touch(os.path.join(test_pkg, fname))

        # Some compiled front-end assets to include
        static = os.path.join(test_pkg, 'static')
        os.mkdir(static)
        for fname in ['app.js', 'app.js.map', 'app.css', 'app.css.map']:
            touch(os.path.join(static, fname))

    def make_manifest(self, contents):
        """Write a MANIFEST.in."""
        manifest = os.path.join(self.temp_dir, 'MANIFEST.in')
        with open(manifest, 'w', encoding="utf-8") as f:
            f.write(DALS(contents))

    def get_files(self):
        """Run egg_info and get all the files to include, as a set"""
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = egg_info(dist)
        cmd.ensure_finalized()
        cmd.run()
        return set(cmd.filelist.files)

    def test_no_manifest(self):
        """Check a missing MANIFEST.in includes only the standard files."""
        assert (default_files - set(['MANIFEST.in'])) == self.get_files()

    def test_empty_files(self):
        """Check an empty MANIFEST.in includes only the standard files."""
        self.make_manifest("")
        assert default_files == self.get_files()

    def test_include(self):
        """Include extra rst files in the project root."""
        self.make_manifest("include *.rst")
        files = default_files | set(['testing.rst', '.hidden.rst'])
        assert files == self.get_files()

    def test_exclude(self):
        """Include everything in app/ except the text files"""
        ml = make_local_path
        self.make_manifest(
            """
            include app/*
            exclude app/*.txt
            """
        )
        files = default_files | set([ml('app/c.rst')])
        assert files == self.get_files()

    def test_include_multiple(self):
        """Include with multiple patterns."""
        ml = make_local_path
        self.make_manifest("include app/*.txt app/static/*")
        files = default_files | set([
            ml('app/a.txt'),
            ml('app/b.txt'),
            ml('app/static/app.js'),
            ml('app/static/app.js.map'),
            ml('app/static/app.css'),
            ml('app/static/app.css.map'),
        ])
        assert files == self.get_files()

    def test_graft(self):
        """Include the whole app/static/ directory."""
        ml = make_local_path
        self.make_manifest("graft app/static")
        files = default_files | set([
            ml('app/static/app.js'),
            ml('app/static/app.js.map'),
            ml('app/static/app.css'),
            ml('app/static/app.css.map'),
        ])
        assert files == self.get_files()

    def test_graft_glob_syntax(self):
        """Include the whole app/static/ directory."""
        ml = make_local_path
        self.make_manifest("graft */static")
        files = default_files | set([
            ml('app/static/app.js'),
            ml('app/static/app.js.map'),
            ml('app/static/app.css'),
            ml('app/static/app.css.map'),
        ])
        assert files == self.get_files()

    def test_graft_global_exclude(self):
        """Exclude all *.map files in the project."""
        ml = make_local_path
        self.make_manifest(
            """
            graft app/static
            global-exclude *.map
            """
        )
        files = default_files | set([ml('app/static/app.js'), ml('app/static/app.css')])
        assert files == self.get_files()

    def test_global_include(self):
        """Include all *.rst, *.js, and *.css files in the whole tree."""
        ml = make_local_path
        self.make_manifest(
            """
            global-include *.rst *.js *.css
            """
        )
        files = default_files | set([
            '.hidden.rst',
            'testing.rst',
            ml('app/c.rst'),
            ml('app/static/app.js'),
            ml('app/static/app.css'),
        ])
        assert files == self.get_files()

    def test_graft_prune(self):
        """Include all files in app/, except for the whole app/static/ dir."""
        ml = make_local_path
        self.make_manifest(
            """
            graft app
            prune app/static
            """
        )
        files = default_files | set([ml('app/a.txt'), ml('app/b.txt'), ml('app/c.rst')])
        assert files == self.get_files()
class TestFileListTest(TempDirTestCase):
    """
    A copy of the relevant bits of distutils/tests/test_filelist.py,
    to ensure setuptools' version of FileList keeps parity with distutils.
    """

    # Only auto-used when testing against the stdlib distutils, whose old-style
    # logging would otherwise not be captured by pytest's caplog.
    @pytest.fixture(autouse=os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib")
    def _compat_record_logs(self, monkeypatch, caplog):
        """Account for stdlib compatibility"""

        def _log(_logger, level, msg, args):
            # Re-route distutils' legacy log calls into caplog records.
            exc = sys.exc_info()
            rec = logging.LogRecord("distutils", level, "", 0, msg, args, exc)
            caplog.records.append(rec)

        monkeypatch.setattr(log.Log, "_log", _log)

    def get_records(self, caplog, *levels):
        # All captured records whose level is one of *levels*.
        return [r for r in caplog.records if r.levelno in levels]

    def assertNoWarnings(self, caplog):
        assert self.get_records(caplog, log.WARN) == []
        caplog.clear()

    def assertWarnings(self, caplog):
        if IS_PYPY and not caplog.records:
            pytest.xfail("caplog checks may not work well in PyPy")
        else:
            assert len(self.get_records(caplog, log.WARN)) > 0
            caplog.clear()

    def make_files(self, files):
        # Create each (relative) file under the temporary cwd, with parents.
        for file in files:
            file = os.path.join(self.temp_dir, file)
            dirname, _basename = os.path.split(file)
            os.makedirs(dirname, exist_ok=True)
            touch(file)

    def test_process_template_line(self):
        # testing all MANIFEST.in template patterns
        file_list = FileList()
        ml = make_local_path

        # simulated file list
        self.make_files([
            'foo.tmp',
            'ok',
            'xo',
            'four.txt',
            'buildout.cfg',
            # filelist does not filter out VCS directories,
            # it's sdist that does
            ml('.hg/last-message.txt'),
            ml('global/one.txt'),
            ml('global/two.txt'),
            ml('global/files.x'),
            ml('global/here.tmp'),
            ml('f/o/f.oo'),
            ml('dir/graft-one'),
            ml('dir/dir2/graft2'),
            ml('dir3/ok'),
            ml('dir3/sub/ok.txt'),
        ])

        MANIFEST_IN = DALS(
            """\
        include ok
        include xo
        exclude xo
        include foo.tmp
        include buildout.cfg
        global-include *.x
        global-include *.txt
        global-exclude *.tmp
        recursive-include f *.oo
        recursive-exclude global *.x
        graft dir
        prune dir3
        """
        )

        for line in MANIFEST_IN.split('\n'):
            if not line:
                continue
            file_list.process_template_line(line)

        wanted = [
            'buildout.cfg',
            'four.txt',
            'ok',
            ml('.hg/last-message.txt'),
            ml('dir/graft-one'),
            ml('dir/dir2/graft2'),
            ml('f/o/f.oo'),
            ml('global/one.txt'),
            ml('global/two.txt'),
        ]
        file_list.sort()

        assert file_list.files == wanted

    def test_exclude_pattern(self):
        # return False if no match
        file_list = FileList()
        assert not file_list.exclude_pattern('*.py')

        # return True if files match
        file_list = FileList()
        file_list.files = ['a.py', 'b.py']
        assert file_list.exclude_pattern('*.py')

        # test excludes
        file_list = FileList()
        file_list.files = ['a.py', 'a.txt']
        file_list.exclude_pattern('*.py')
        file_list.sort()
        assert file_list.files == ['a.txt']

    def test_include_pattern(self):
        # return False if no match
        file_list = FileList()
        self.make_files([])
        assert not file_list.include_pattern('*.py')

        # return True if files match
        file_list = FileList()
        self.make_files(['a.py', 'b.txt'])
        assert file_list.include_pattern('*.py')

        # test * matches all files
        file_list = FileList()
        self.make_files(['a.py', 'b.txt'])
        file_list.include_pattern('*')
        file_list.sort()
        assert file_list.files == ['a.py', 'b.txt']

    def test_process_template_line_invalid(self):
        # invalid lines
        file_list = FileList()
        for action in (
            'include',
            'exclude',
            'global-include',
            'global-exclude',
            'recursive-include',
            'recursive-exclude',
            'graft',
            'prune',
            'blarg',
        ):
            with pytest.raises(DistutilsTemplateError):
                file_list.process_template_line(action)

    def test_include(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # include
        file_list = FileList()
        self.make_files(['a.py', 'b.txt', ml('d/c.py')])

        file_list.process_template_line('include *.py')
        file_list.sort()
        assert file_list.files == ['a.py']
        self.assertNoWarnings(caplog)

        file_list.process_template_line('include *.rb')
        file_list.sort()
        assert file_list.files == ['a.py']
        self.assertWarnings(caplog)

    def test_exclude(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', ml('d/c.py')]

        file_list.process_template_line('exclude *.py')
        file_list.sort()
        assert file_list.files == ['b.txt', ml('d/c.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('exclude *.rb')
        file_list.sort()
        assert file_list.files == ['b.txt', ml('d/c.py')]
        self.assertWarnings(caplog)

    def test_global_include(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # global-include
        file_list = FileList()
        self.make_files(['a.py', 'b.txt', ml('d/c.py')])

        file_list.process_template_line('global-include *.py')
        file_list.sort()
        assert file_list.files == ['a.py', ml('d/c.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('global-include *.rb')
        file_list.sort()
        assert file_list.files == ['a.py', ml('d/c.py')]
        self.assertWarnings(caplog)

    def test_global_exclude(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # global-exclude
        file_list = FileList()
        file_list.files = ['a.py', 'b.txt', ml('d/c.py')]

        file_list.process_template_line('global-exclude *.py')
        file_list.sort()
        assert file_list.files == ['b.txt']
        self.assertNoWarnings(caplog)

        file_list.process_template_line('global-exclude *.rb')
        file_list.sort()
        assert file_list.files == ['b.txt']
        self.assertWarnings(caplog)

    def test_recursive_include(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # recursive-include
        file_list = FileList()
        self.make_files(['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')])

        file_list.process_template_line('recursive-include d *.py')
        file_list.sort()
        assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('recursive-include e *.py')
        file_list.sort()
        assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
        self.assertWarnings(caplog)

    def test_recursive_exclude(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # recursive-exclude
        file_list = FileList()
        file_list.files = ['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')]

        file_list.process_template_line('recursive-exclude d *.py')
        file_list.sort()
        assert file_list.files == ['a.py', ml('d/c.txt')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('recursive-exclude e *.py')
        file_list.sort()
        assert file_list.files == ['a.py', ml('d/c.txt')]
        self.assertWarnings(caplog)

    def test_graft(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # graft
        file_list = FileList()
        self.make_files(['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')])

        file_list.process_template_line('graft d')
        file_list.sort()
        assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('graft e')
        file_list.sort()
        assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
        self.assertWarnings(caplog)

    def test_prune(self, caplog):
        caplog.set_level(logging.DEBUG)
        ml = make_local_path
        # prune
        file_list = FileList()
        file_list.files = ['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')]

        file_list.process_template_line('prune d')
        file_list.sort()
        assert file_list.files == ['a.py', ml('f/f.py')]
        self.assertNoWarnings(caplog)

        file_list.process_template_line('prune e')
        file_list.sort()
        assert file_list.files == ['a.py', ml('f/f.py')]
        self.assertWarnings(caplog)

View File

@@ -0,0 +1,138 @@
import subprocess
import sys
from setuptools._path import paths_on_pythonpath
from . import namespaces
class TestNamespaces:
    """Namespace-package installation scenarios driven through pip in
    subprocesses, with targets placed on PYTHONPATH."""

    def test_mixed_site_and_non_site(self, tmpdir):
        """
        Installing two packages sharing the same namespace, one installed
        to a site dir and the other installed just to a path on PYTHONPATH
        should leave the namespace intact and both packages reachable by
        import.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        site_packages = tmpdir / 'site-packages'
        path_packages = tmpdir / 'path-packages'
        targets = site_packages, path_packages
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(site_packages),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(site_packages)
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_B),
            '-t',
            str(path_packages),
        ]
        subprocess.check_call(install_cmd)
        try_import = [
            sys.executable,
            '-c',
            'import myns.pkgA; import myns.pkgB',
        ]
        with paths_on_pythonpath(map(str, targets)):
            subprocess.check_call(try_import)

    def test_pkg_resources_import(self, tmpdir):
        """
        Ensure that a namespace package doesn't break on import
        of pkg_resources.
        """
        pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        target.mkdir()
        install_cmd = [
            sys.executable,
            '-m',
            'pip',
            'install',
            '-t',
            str(target),
            str(pkg),
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)
        try_import = [
            sys.executable,
            '-c',
            'import pkg_resources',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(try_import)

    def test_namespace_package_installed_and_cwd(self, tmpdir):
        """
        Installing a namespace packages but also having it in the current
        working directory, only one version should take precedence.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that package imports and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))

    def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
        """
        Installing one namespace package and also have another in the same
        namespace in the current working directory, both of them must be
        importable.
        """
        pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
        pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
        target = tmpdir / 'packages'
        # use pip to install to the target directory
        install_cmd = [
            sys.executable,
            '-m',
            'pip.__main__',
            'install',
            str(pkg_A),
            '-t',
            str(target),
        ]
        subprocess.check_call(install_cmd)
        namespaces.make_site_dir(target)

        # ensure that all packages import and pkg_resources imports
        pkg_resources_imp = [
            sys.executable,
            '-c',
            'import pkg_resources; import myns.pkgA; import myns.pkgB',
        ]
        with paths_on_pythonpath([str(target)]):
            subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))

View File

@@ -0,0 +1,12 @@
from setuptools import _scripts
class TestWindowsScriptWriter:
    def test_header(self):
        """The generated header is a shebang line without an escaped quote."""
        header = _scripts.WindowsScriptWriter.get_header('')
        assert header.startswith('#!')
        assert header.endswith('\n')
        # Drop the shebang marker and trailing newline, then inspect the
        # interpreter path itself.
        interpreter = header.lstrip('#!').rstrip('\n')
        # header should not start with an escaped quote
        assert not interpreter.startswith('\\"')

View File

@@ -0,0 +1,984 @@
"""sdist tests"""
import contextlib
import io
import logging
import os
import pathlib
import sys
import tarfile
import tempfile
import unicodedata
from inspect import cleandoc
from pathlib import Path
from unittest import mock
import jaraco.path
import pytest
from setuptools import Command, SetuptoolsDeprecationWarning
from setuptools._importlib import metadata
from setuptools.command.egg_info import manifest_maker
from setuptools.command.sdist import sdist
from setuptools.dist import Distribution
from setuptools.extension import Extension
from setuptools.tests import fail_on_ascii
from .text import Filenames
import distutils
from distutils.core import run_setup
# Attributes of the synthetic test distribution used throughout this module.
SETUP_ATTRS = {
    'name': 'sdist_test',
    'version': '0.0',
    'packages': ['sdist_test'],
    'package_data': {'sdist_test': ['*.txt']},
    'data_files': [("data", [os.path.join("d", "e.dat")])],
}
# A minimal setup.py script embedding SETUP_ATTRS verbatim.
SETUP_PY = f"""\
from setuptools import setup
setup(**{SETUP_ATTRS!r})
"""
# Extension with both `sources` and `depends`, used to verify that the
# referenced C files are picked up by the sdist file list.
EXTENSION = Extension(
    name="sdist_test.f",
    sources=[os.path.join("sdist_test", "f.c")],
    depends=[os.path.join("sdist_test", "f.h")],
)
# All C files the extension references, in one flat list.
EXTENSION_SOURCES = EXTENSION.sources + EXTENSION.depends
@contextlib.contextmanager
def quiet():
    """Temporarily silence stdout/stderr by redirecting them to buffers."""
    saved = sys.stdout, sys.stderr
    sys.stdout = io.StringIO()
    sys.stderr = io.StringIO()
    try:
        yield
    finally:
        # Restore the real streams even if the body raised.
        sys.stdout, sys.stderr = saved
def posix(path):
    """Convert *path* (str or bytes) to use POSIX '/' separators."""
    if isinstance(path, str):
        return path.replace(os.sep, '/')
    # bytes path: replace the encoded separator
    return path.replace(os.sep.encode('ascii'), b'/')
def decompose(path):
    """Normalize *path* to NFD form (HFS Plus stores decomposed UTF-8)."""
    if isinstance(path, str):
        return unicodedata.normalize('NFD', path)
    # bytes: round-trip through UTF-8 when possible
    try:
        decoded = path.decode('utf-8')
    except UnicodeError:
        return path  # Not UTF-8 -- leave the bytes untouched
    return unicodedata.normalize('NFD', decoded).encode('utf-8')
def read_all_bytes(filename):
    """Return the entire contents of *filename* as bytes."""
    with open(filename, 'rb') as stream:
        return stream.read()
def latin1_fail():
    """Return True when this system cannot create latin-1 encoded filenames."""
    try:
        handle, name = tempfile.mkstemp(suffix=Filenames.latin_1)
        os.close(handle)
        os.remove(name)
    except Exception:
        return True
    # Success: fall through to None (falsy), matching the xfail contract.
    return None
# Shared skip/xfail markers for the tests below.
# xfail when the filesystem cannot represent latin-1 filenames at all.
fail_on_latin1_encoded_filenames = pytest.mark.xfail(
    latin1_fail(),
    reason="System does not support latin-1 filenames",
)
# Some tests misbehave under parallel pytest-xdist workers.
skip_under_xdist = pytest.mark.skipif(
    "os.environ.get('PYTEST_XDIST_WORKER')",
    reason="pytest-dev/pytest-xdist#843",
)
# Some tests require setuptools' vendored distutils, not the stdlib one.
skip_under_stdlib_distutils = pytest.mark.skipif(
    not distutils.__package__.startswith('setuptools'),
    reason="the test is not supported with stdlib distutils",
)
def touch(path):
    """Create (or truncate) an empty file at *path* and return the path."""
    with open(path, 'wb'):
        pass
    return path
def symlink_or_skip_test(src, dst):
    """Symlink *src* -> *dst*; skip the current test if the OS refuses."""
    try:
        os.symlink(src, dst)
        return dst
    except (OSError, NotImplementedError):
        # e.g. Windows without privileges, or platforms without symlinks
        pytest.skip("symlink not supported in OS")
        return None
class TestSdistTest:
    """Tests for the `sdist` command's manifest/file-list behavior."""

    @pytest.fixture(autouse=True)
    def source_dir(self, tmpdir):
        # Build a throwaway project tree and chdir into it for each test.
        tmpdir = tmpdir / "project_root"
        tmpdir.mkdir()
        (tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8')
        # Set up the rest of the test package
        test_pkg = tmpdir / 'sdist_test'
        test_pkg.mkdir()
        data_folder = tmpdir / 'd'
        data_folder.mkdir()
        # *.rst was not included in package_data, so c.rst should not be
        # automatically added to the manifest when not under version control
        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
            touch(test_pkg / fname)
        touch(data_folder / 'e.dat')
        # C sources are not included by default, but they will be,
        # if an extension module uses them as sources or depends
        for fname in EXTENSION_SOURCES:
            touch(tmpdir / fname)
        with tmpdir.as_cwd():
            yield tmpdir

    def assert_package_data_in_manifest(self, cmd):
        # Shared check: package_data files present, excluded ones absent.
        manifest = cmd.filelist.files
        assert os.path.join('sdist_test', 'a.txt') in manifest
        assert os.path.join('sdist_test', 'b.txt') in manifest
        assert os.path.join('sdist_test', 'c.rst') not in manifest
        assert os.path.join('d', 'e.dat') in manifest

    def setup_with_extension(self):
        """Run sdist for a distribution including EXTENSION; return the cmd."""
        setup_attrs = {**SETUP_ATTRS, 'ext_modules': [EXTENSION]}
        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        return cmd

    def test_package_data_in_sdist(self):
        """Regression test for pull request #4: ensures that files listed in
        package_data are included in the manifest even if they're not added to
        version control.
        """
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        self.assert_package_data_in_manifest(cmd)

    def test_package_data_and_include_package_data_in_sdist(self):
        """
        Ensure package_data and include_package_data work
        together.
        """
        setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
        assert setup_attrs['package_data']
        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        self.assert_package_data_in_manifest(cmd)

    def test_extension_sources_in_sdist(self):
        """
        Ensure that the files listed in Extension.sources and Extension.depends
        are automatically included in the manifest.
        """
        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path in manifest

    def test_missing_extension_sources(self):
        """
        Similar to test_extension_sources_in_sdist but the referenced files don't exist.
        Missing files should not be included in distribution (with no error raised).
        """
        for path in EXTENSION_SOURCES:
            os.remove(path)
        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path not in manifest

    def test_symlinked_extension_sources(self):
        """
        Similar to test_extension_sources_in_sdist but the referenced files are
        instead symbolic links to project-local files. Referenced file paths
        should be included. Symlink targets themselves should NOT be included.
        """
        symlinked = []
        for path in EXTENSION_SOURCES:
            base, ext = os.path.splitext(path)
            # NOTE(review): `ext` keeps its leading dot, so this yields a
            # doubled dot (e.g. "f_target..c") -- harmless for this test.
            target = base + "_target." + ext
            os.rename(path, target)
            symlink_or_skip_test(os.path.basename(target), path)
            symlinked.append(target)
        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        for path in EXTENSION_SOURCES:
            assert path in manifest
        for path in symlinked:
            assert path not in manifest

    # Reason string -> factory producing an invalid `Extension.depends` path.
    _INVALID_PATHS = {
        "must be relative": lambda: (
            os.path.abspath(os.path.join("sdist_test", "f.h"))
        ),
        "can't have `..` segments": lambda: (
            os.path.join("sdist_test", "..", "sdist_test", "f.h")
        ),
        "doesn't exist": lambda: (
            os.path.join("sdist_test", "this_file_does_not_exist.h")
        ),
        "must be inside the project root": lambda: (
            symlink_or_skip_test(
                touch(os.path.join("..", "outside_of_project_root.h")),
                "symlink.h",
            )
        ),
    }

    @skip_under_stdlib_distutils
    @pytest.mark.parametrize("reason", _INVALID_PATHS.keys())
    def test_invalid_extension_depends(self, reason, caplog):
        """
        Due to backwards compatibility reasons, `Extension.depends` should accept
        invalid/weird paths, but then ignore them when building a sdist.
        This test verifies that the source distribution is still built
        successfully with such paths, but that instead of adding these paths to
        the manifest, we emit an informational message, notifying the user that
        the invalid path won't be automatically included.
        """
        invalid_path = self._INVALID_PATHS[reason]()
        extension = Extension(
            name="sdist_test.f",
            sources=[],
            depends=[invalid_path],
        )
        setup_attrs = {**SETUP_ATTRS, 'ext_modules': [extension]}
        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet(), caplog.at_level(logging.INFO):
            cmd.run()
        self.assert_package_data_in_manifest(cmd)
        manifest = cmd.filelist.files
        assert invalid_path not in manifest
        # Exactly one INFO record should mention the ignored path.
        expected_message = [
            message
            for (logger, level, message) in caplog.record_tuples
            if (
                logger == "root"  #
                and level == logging.INFO  #
                and invalid_path in message  #
            )
        ]
        assert len(expected_message) == 1
        (expected_message,) = expected_message
        assert reason in expected_message

    def test_custom_build_py(self):
        """
        Ensure projects defining custom build_py don't break
        when creating sdists (issue #2849)
        """
        from distutils.command.build_py import build_py as OrigBuildPy

        using_custom_command_guard = mock.Mock()

        class CustomBuildPy(OrigBuildPy):
            """
            Some projects have custom commands inheriting from `distutils`
            """

            def get_data_files(self):
                using_custom_command_guard()
                return super().get_data_files()

        setup_attrs = {**SETUP_ATTRS, 'include_package_data': True}
        assert setup_attrs['package_data']
        dist = Distribution(setup_attrs)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # Make sure we use the custom command
        cmd.cmdclass = {'build_py': CustomBuildPy}
        cmd.distribution.cmdclass = {'build_py': CustomBuildPy}
        assert cmd.distribution.get_command_class('build_py') == CustomBuildPy
        msg = "setuptools instead of distutils"
        with quiet(), pytest.warns(SetuptoolsDeprecationWarning, match=msg):
            cmd.run()
        using_custom_command_guard.assert_called()
        self.assert_package_data_in_manifest(cmd)

    def test_setup_py_exists(self):
        # setup.py is present on disk, so it must land in the manifest.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'setup.py' in manifest

    def test_setup_py_missing(self):
        # With setup.py removed, it must not appear in the manifest.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        if os.path.exists("setup.py"):
            os.remove("setup.py")
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'setup.py' not in manifest

    def test_setup_py_excluded(self):
        # An explicit MANIFEST.in exclusion overrides the default inclusion.
        with open("MANIFEST.in", "w", encoding="utf-8") as manifest_file:
            manifest_file.write("exclude setup.py")
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'foo.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'setup.py' not in manifest

    def test_defaults_case_sensitivity(self, source_dir):
        """
        Make sure default files (README.*, etc.) are added in a case-sensitive
        way to avoid problems with packages built on Windows.
        """
        touch(source_dir / 'readme.rst')
        touch(source_dir / 'SETUP.cfg')
        dist = Distribution(SETUP_ATTRS)
        # the extension deliberately capitalized for this test
        # to make sure the actual filename (not capitalized) gets added
        # to the manifest
        dist.script_name = 'setup.PY'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        # lowercase all names so we can test in a
        # case-insensitive way to make sure the files
        # are not included.
        # NOTE(review): `manifest` is a single-use iterator; each `in` check
        # below consumes it, which only works because all checks are `not in`.
        manifest = map(lambda x: x.lower(), cmd.filelist.files)
        assert 'readme.rst' not in manifest, manifest
        assert 'setup.py' not in manifest, manifest
        assert 'setup.cfg' not in manifest, manifest

    def test_exclude_dev_only_cache_folders(self, source_dir):
        included = {
            # Emulate problem in https://github.com/pypa/setuptools/issues/4601
            "MANIFEST.in": (
                "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*\n"
                "global-include *.txt\n"
            ),
            # For the sake of being conservative and limiting unforeseen side-effects
            # we just exclude dev-only cache folders at the root of the repository:
            "test/.venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
            "src/.nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
            "doc/.tox/default/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
            # Let's test against false positives with similarly named files:
            ".venv-requirements.txt": "",
            ".tox-coveragerc.txt": "",
            ".noxy/coveragerc.txt": "",
        }
        excluded = {
            # .tox/.nox/.venv are well-know folders present at the root of Python repos
            # and therefore should be excluded
            ".tox/release/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
            ".nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
            ".venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
        }
        for file, content in {**excluded, **included}.items():
            Path(source_dir, file).parent.mkdir(parents=True, exist_ok=True)
            Path(source_dir, file).write_text(content, encoding="utf-8")
        cmd = self.setup_with_extension()
        self.assert_package_data_in_manifest(cmd)
        # Normalize to '/' so assertions work on Windows too.
        manifest = {f.replace(os.sep, '/') for f in cmd.filelist.files}
        for path in excluded:
            assert os.path.exists(path)
            assert path not in manifest, (path, manifest)
        for path in included:
            assert os.path.exists(path)
            assert path in manifest, (path, manifest)

    @fail_on_ascii
    def test_manifest_is_written_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')
        # UTF-8 filename
        filename = os.path.join('sdist_test', 'smörbröd.py')
        # Must create the file or it will get stripped.
        touch(filename)
        # Add UTF-8 filename and write manifest
        with quiet():
            mm.run()
            mm.filelist.append(filename)
            mm.write_manifest()
        contents = read_all_bytes(mm.manifest)
        # The manifest should be UTF-8 encoded
        u_contents = contents.decode('UTF-8')
        # The manifest should contain the UTF-8 filename
        assert posix(filename) in u_contents

    @fail_on_ascii
    def test_write_manifest_allows_utf8_filenames(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')
        filename = os.path.join(b'sdist_test', Filenames.utf_8)
        # Must touch the file or risk removal
        touch(filename)
        # Add filename and write manifest
        with quiet():
            mm.run()
        u_filename = filename.decode('utf-8')
        mm.filelist.files.append(u_filename)
        # Re-write manifest
        mm.write_manifest()
        contents = read_all_bytes(mm.manifest)
        # The manifest should be UTF-8 encoded
        contents.decode('UTF-8')
        # The manifest should contain the UTF-8 filename
        assert posix(filename) in contents
        # The filelist should have been updated as well
        assert u_filename in mm.filelist.files

    @skip_under_xdist
    def test_write_manifest_skips_non_utf8_filenames(self):
        """
        Files that cannot be encoded to UTF-8 (specifically, those that
        weren't originally successfully decoded and have surrogate
        escapes) should be omitted from the manifest.
        See https://bitbucket.org/tarek/distribute/issue/303 for history.
        """
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')
        # Latin-1 filename
        filename = os.path.join(b'sdist_test', Filenames.latin_1)
        # Add filename with surrogates and write manifest
        with quiet():
            mm.run()
        u_filename = filename.decode('utf-8', 'surrogateescape')
        mm.filelist.append(u_filename)
        # Re-write manifest
        mm.write_manifest()
        contents = read_all_bytes(mm.manifest)
        # The manifest should be UTF-8 encoded
        contents.decode('UTF-8')
        # The Latin-1 filename should have been skipped
        assert posix(filename) not in contents
        # The filelist should have been updated as well
        assert u_filename not in mm.filelist.files

    @fail_on_ascii
    def test_manifest_is_read_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # Create manifest
        with quiet():
            cmd.run()
        # Add UTF-8 filename to manifest
        filename = os.path.join(b'sdist_test', Filenames.utf_8)
        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        manifest = open(cmd.manifest, 'ab')
        manifest.write(b'\n' + filename)
        manifest.close()
        # The file must exist to be included in the filelist
        touch(filename)
        # Re-read manifest
        cmd.filelist.files = []
        with quiet():
            cmd.read_manifest()
        # The filelist should contain the UTF-8 filename
        filename = filename.decode('utf-8')
        assert filename in cmd.filelist.files

    @fail_on_latin1_encoded_filenames
    def test_read_manifest_skips_non_utf8_filenames(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # Create manifest
        with quiet():
            cmd.run()
        # Add Latin-1 filename to manifest
        filename = os.path.join(b'sdist_test', Filenames.latin_1)
        cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        manifest = open(cmd.manifest, 'ab')
        manifest.write(b'\n' + filename)
        manifest.close()
        # The file must exist to be included in the filelist
        touch(filename)
        # Re-read manifest
        cmd.filelist.files = []
        with quiet():
            cmd.read_manifest()
        # The Latin-1 filename should have been skipped
        filename = filename.decode('latin-1')
        assert filename not in cmd.filelist.files

    @fail_on_ascii
    @fail_on_latin1_encoded_filenames
    def test_sdist_with_utf8_encoded_filename(self):
        # Test for #303.
        dist = Distribution(self.make_strings(SETUP_ATTRS))
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        filename = os.path.join(b'sdist_test', Filenames.utf_8)
        touch(filename)
        with quiet():
            cmd.run()
        if sys.platform == 'darwin':
            filename = decompose(filename)
        fs_enc = sys.getfilesystemencoding()
        if sys.platform == 'win32':
            if fs_enc == 'cp1252':
                # Python mangles the UTF-8 filename
                filename = filename.decode('cp1252')
                assert filename in cmd.filelist.files
            else:
                filename = filename.decode('mbcs')
                assert filename in cmd.filelist.files
        else:
            filename = filename.decode('utf-8')
            assert filename in cmd.filelist.files

    @classmethod
    def make_strings(cls, item):
        """Recursively convert every leaf of *item* to str."""
        if isinstance(item, dict):
            return {key: cls.make_strings(value) for key, value in item.items()}
        if isinstance(item, list):
            return list(map(cls.make_strings, item))
        return str(item)

    @fail_on_latin1_encoded_filenames
    @skip_under_xdist
    def test_sdist_with_latin1_encoded_filename(self):
        # Test for #303.
        dist = Distribution(self.make_strings(SETUP_ATTRS))
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # Latin-1 filename
        filename = os.path.join(b'sdist_test', Filenames.latin_1)
        touch(filename)
        assert os.path.isfile(filename)
        with quiet():
            cmd.run()
        # not all windows systems have a default FS encoding of cp1252
        if sys.platform == 'win32':
            # Latin-1 is similar to Windows-1252 however
            # on mbcs filesys it is not in latin-1 encoding
            fs_enc = sys.getfilesystemencoding()
            if fs_enc != 'mbcs':
                fs_enc = 'latin-1'
            filename = filename.decode(fs_enc)
            assert filename in cmd.filelist.files
        else:
            # The Latin-1 filename should have been skipped
            filename = filename.decode('latin-1')
            assert filename not in cmd.filelist.files

    # Config texts whose `file:` directives reference files that must be
    # pulled into the sdist automatically.
    _EXAMPLE_DIRECTIVES = {
        "setup.cfg - long_description and version": """
            [metadata]
            name = testing
            version = file: src/VERSION.txt
            license_files = DOWHATYOUWANT
            long_description = file: README.rst, USAGE.rst
            """,
        "pyproject.toml - static readme/license files and dynamic version": """
            [project]
            name = "testing"
            readme = "USAGE.rst"
            license-files = ["DOWHATYOUWANT"]
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {file = ["src/VERSION.txt"]}
            """,
        "pyproject.toml - directive with str instead of list": """
            [project]
            name = "testing"
            readme = "USAGE.rst"
            license-files = ["DOWHATYOUWANT"]
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {file = "src/VERSION.txt"}
            """,
        "pyproject.toml - deprecated license table with file entry": """
            [project]
            name = "testing"
            readme = "USAGE.rst"
            license = {file = "DOWHATYOUWANT"}
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {file = "src/VERSION.txt"}
            """,
    }

    @pytest.mark.parametrize("config", _EXAMPLE_DIRECTIVES.keys())
    @pytest.mark.filterwarnings(
        "ignore:.project.license. as a TOML table is deprecated"
    )
    def test_add_files_referenced_by_config_directives(self, source_dir, config):
        # The part before " - " in the key names the config file to write.
        config_file, _, _ = config.partition(" - ")
        config_text = self._EXAMPLE_DIRECTIVES[config]
        (source_dir / 'src').mkdir()
        (source_dir / 'src/VERSION.txt').write_text("0.42", encoding="utf-8")
        (source_dir / 'README.rst').write_text("hello world!", encoding="utf-8")
        (source_dir / 'USAGE.rst').write_text("hello world!", encoding="utf-8")
        (source_dir / 'DOWHATYOUWANT').write_text("hello world!", encoding="utf-8")
        (source_dir / config_file).write_text(config_text, encoding="utf-8")
        dist = Distribution({"packages": []})
        dist.script_name = 'setup.py'
        dist.parse_config_files()
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        assert (
            'src/VERSION.txt' in cmd.filelist.files
            or 'src\\VERSION.txt' in cmd.filelist.files
        )
        assert 'USAGE.rst' in cmd.filelist.files
        assert 'DOWHATYOUWANT' in cmd.filelist.files
        assert '/' not in cmd.filelist.files
        assert '\\' not in cmd.filelist.files

    def test_pyproject_toml_in_sdist(self, source_dir):
        """
        Check if pyproject.toml is included in source distribution if present
        """
        touch(source_dir / 'pyproject.toml')
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'pyproject.toml' in manifest

    def test_pyproject_toml_excluded(self, source_dir):
        """
        Check that pyproject.toml can be excluded even if present
        """
        touch(source_dir / 'pyproject.toml')
        with open('MANIFEST.in', 'w', encoding="utf-8") as mts:
            print('exclude pyproject.toml', file=mts)
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert 'pyproject.toml' not in manifest

    def test_build_subcommand_source_files(self, source_dir):
        touch(source_dir / '.myfile~')
        # Sanity check: without custom commands file list should not be affected
        dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert '.myfile~' not in manifest
        # Test: custom command should be able to augment file list
        dist = Distribution({**SETUP_ATTRS, "script_name": "setup.py"})
        build = dist.get_command_obj("build")
        build.sub_commands = [*build.sub_commands, ("build_custom", None)]

        class build_custom(Command):
            def initialize_options(self): ...

            def finalize_options(self): ...

            def run(self): ...

            def get_source_files(self):
                return ['.myfile~']

        dist.cmdclass.update(build_custom=build_custom)
        cmd = sdist(dist)
        cmd.use_defaults = True
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
        manifest = cmd.filelist.files
        assert '.myfile~' in manifest

    @pytest.mark.skipif("os.environ.get('SETUPTOOLS_USE_DISTUTILS') == 'stdlib'")
    def test_build_base_pathlib(self, source_dir):
        """
        Ensure if build_base is a pathlib.Path, the build still succeeds.
        """
        dist = Distribution({
            **SETUP_ATTRS,
            "script_name": "setup.py",
            "options": {"build": {"build_base": pathlib.Path('build')}},
        })
        cmd = sdist(dist)
        cmd.ensure_finalized()
        with quiet():
            cmd.run()
def test_default_revctrl():
    """
    When _default_revctrl was removed from the `setuptools.command.sdist`
    module in 10.0, it broke some systems which keep an old install of
    setuptools (Distribute) around. Those old versions require that the
    setuptools package continue to implement that interface, so this
    function provides that interface, stubbed. See #320 for details.
    This interface must be maintained until Ubuntu 12.04 is no longer
    supported (by Setuptools).
    """
    # Resolve the entry point exactly as an old Distribute install would.
    (ep,) = metadata.EntryPoints._from_text(
        """
        [setuptools.file_finders]
        svn_cvs = setuptools.command.sdist:_default_revctrl
        """
    )
    res = ep.load()
    # The stub's only contract: it must be iterable.
    assert hasattr(res, '__iter__')
class TestRegressions:
    """
    Can be removed/changed if the project decides to change how it handles symlinks
    or external files.
    """

    @staticmethod
    def files_for_symlink_in_extension_depends(tmp_path, dep_path):
        # Layout: an external header dir, plus a project whose Extension.depends
        # points through a symlink (created later by the test).
        return {
            "external": {
                "dir": {"file.h": ""},
            },
            "project": {
                "setup.py": cleandoc(
                    f"""
                    from setuptools import Extension, setup
                    setup(
                        name="myproj",
                        version="42",
                        ext_modules=[
                            Extension(
                                "hello", sources=["hello.pyx"],
                                depends=[{dep_path!r}]
                            )
                        ],
                    )
                    """
                ),
                "hello.pyx": "",
                "MANIFEST.in": "global-include *.h",
            },
        }

    @pytest.mark.parametrize(
        "dep_path", ("myheaders/dir/file.h", "myheaders/dir/../dir/file.h")
    )
    def test_symlink_in_extension_depends(self, monkeypatch, tmp_path, dep_path):
        # Given a project with a symlinked dir and a "depends" targeting that dir
        files = self.files_for_symlink_in_extension_depends(tmp_path, dep_path)
        jaraco.path.build(files, prefix=str(tmp_path))
        symlink_or_skip_test(tmp_path / "external", tmp_path / "project/myheaders")
        # When `sdist` runs, there should be no error
        members = run_sdist(monkeypatch, tmp_path / "project")
        # and the sdist should contain the symlinked files
        for expected in (
            "myproj-42/hello.pyx",
            "myproj-42/myheaders/dir/file.h",
        ):
            assert expected in members

    @staticmethod
    def files_for_external_path_in_extension_depends(tmp_path, dep_path):
        # "$tmp_path$/" prefix marks a path to be made absolute under tmp_path.
        head, _, tail = dep_path.partition("$tmp_path$/")
        dep_path = tmp_path / tail if tail else head
        return {
            "external": {
                "dir": {"file.h": ""},
            },
            "project": {
                "setup.py": cleandoc(
                    f"""
                    from setuptools import Extension, setup
                    setup(
                        name="myproj",
                        version="42",
                        ext_modules=[
                            Extension(
                                "hello", sources=["hello.pyx"],
                                depends=[{str(dep_path)!r}]
                            )
                        ],
                    )
                    """
                ),
                "hello.pyx": "",
                "MANIFEST.in": "global-include *.h",
            },
        }

    @pytest.mark.parametrize(
        "dep_path", ("$tmp_path$/external/dir/file.h", "../external/dir/file.h")
    )
    def test_external_path_in_extension_depends(self, monkeypatch, tmp_path, dep_path):
        # Given a project with a "depends" targeting an external dir
        files = self.files_for_external_path_in_extension_depends(tmp_path, dep_path)
        jaraco.path.build(files, prefix=str(tmp_path))
        # When `sdist` runs, there should be no error
        members = run_sdist(monkeypatch, tmp_path / "project")
        # and the sdist should not contain the external file
        for name in members:
            assert "file.h" not in name
def run_sdist(monkeypatch, project):
    """Given a project directory, run the sdist and return its contents"""
    monkeypatch.chdir(project)
    with quiet():
        run_setup("setup.py", ["sdist"])
    # Exactly one .tar.gz archive is expected in ./dist
    archive = next((project / "dist").glob("*.tar.gz"))
    with tarfile.open(str(archive)) as tar:
        return set(tar.getnames())
def test_sanity_check_setuptools_own_sdist(setuptools_sdist):
with tarfile.open(setuptools_sdist) as tar:
files = tar.getnames()
# setuptools sdist should not include the .tox folder
tox_files = [name for name in files if ".tox" in name]
assert len(tox_files) == 0, f"not empty {tox_files}"

View File

@@ -0,0 +1,40 @@
import configparser
from setuptools.command import setopt
class TestEdit:
@staticmethod
def parse_config(filename):
parser = configparser.ConfigParser()
with open(filename, encoding='utf-8') as reader:
parser.read_file(reader)
return parser
@staticmethod
def write_text(file, content):
with open(file, 'wb') as strm:
strm.write(content.encode('utf-8'))
def test_utf8_encoding_retained(self, tmpdir):
"""
When editing a file, non-ASCII characters encoded in
UTF-8 should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\njaraco=джарако')
setopt.edit_config(str(config), dict(names=dict(other='yes')))
parser = self.parse_config(str(config))
assert parser.get('names', 'jaraco') == 'джарако'
assert parser.get('names', 'other') == 'yes'
def test_case_retained(self, tmpdir):
"""
When editing a file, case of keys should be retained.
"""
config = tmpdir.join('setup.cfg')
self.write_text(str(config), '[names]\nFoO=bAr')
setopt.edit_config(str(config), dict(names=dict(oTher='yes')))
actual = config.read_text(encoding='ascii')
assert 'FoO' in actual
assert 'oTher' in actual

View File

@@ -0,0 +1,290 @@
"""Tests for the 'setuptools' package"""
import os
import re
import sys
from zipfile import ZipFile
import pytest
from packaging.version import Version
import setuptools
import setuptools.depends as dep
import setuptools.dist
from setuptools.depends import Require
import distutils.cmd
import distutils.core
from distutils.core import Extension
from distutils.errors import DistutilsSetupError
@pytest.fixture(autouse=True)
def isolated_dir(tmpdir_cwd):
    # Run every test in this module from a fresh temporary cwd.
    return
def makeSetup(**args):
    """Return distribution from 'setup(**args)', without executing commands"""
    # Stop setup() right after command-line parsing, before any command runs.
    distutils.core._setup_stop_after = "commandline"
    # Don't let system command line leak into tests!
    args.setdefault('script_args', ['install'])
    try:
        return setuptools.setup(**args)
    finally:
        # Always restore the global so later setup() calls behave normally.
        distutils.core._setup_stop_after = None
# Skip marker for tests that rely on bytecode-introspection helpers
# (absent on platforms without CPython-style bytecode).
needs_bytecode = pytest.mark.skipif(
    not hasattr(dep, 'get_module_constant'),
    reason="bytecode support not available",
)
class TestDepends:
    """Tests for setuptools.depends (bytecode inspection and Require)."""

    def testExtractConst(self):
        if not hasattr(dep, 'extract_constant'):
            # skip on non-bytecode platforms
            return

        def f1():
            global x, y, z
            x = "test"
            y = z  # pyright: ignore[reportUnboundVariable] # Explicitly testing for this runtime issue

        fc = f1.__code__
        # unrecognized name
        assert dep.extract_constant(fc, 'q', -1) is None
        # constant assigned
        assert dep.extract_constant(fc, 'x', -1) == "test"
        # expression assigned
        assert dep.extract_constant(fc, 'y', -1) == -1
        # recognized name, not assigned
        assert dep.extract_constant(fc, 'z', -1) is None

    def testFindModule(self):
        # Bogus and dashed module names must raise ImportError.
        with pytest.raises(ImportError):
            dep.find_module('no-such.-thing')
        with pytest.raises(ImportError):
            dep.find_module('setuptools.non-existent')
        f, _p, _i = dep.find_module('setuptools.tests')
        f.close()

    @needs_bytecode
    def testModuleExtract(self):
        from json import __version__

        assert dep.get_module_constant('json', '__version__') == __version__
        assert dep.get_module_constant('sys', 'version') == sys.version
        assert (
            dep.get_module_constant('setuptools.tests.test_setuptools', '__doc__')
            == __doc__
        )

    @needs_bytecode
    def testRequire(self):
        req = Require('Json', '1.0.3', 'json')
        assert req.name == 'Json'
        assert req.module == 'json'
        assert req.requested_version == Version('1.0.3')
        assert req.attribute == '__version__'
        assert req.full_name() == 'Json-1.0.3'

        from json import __version__

        assert str(req.get_version()) == __version__
        assert req.version_ok('1.0.9')
        assert not req.version_ok('0.9.1')
        assert not req.version_ok('unknown')
        assert req.is_present()
        assert req.is_current()

        # A requirement on a module that cannot exist is neither present
        # nor current.
        req = Require('Do-what-I-mean', '1.0', 'd-w-i-m')
        assert not req.is_present()
        assert not req.is_current()

    @needs_bytecode
    def test_require_present(self):
        # In #1896, this test was failing for months with the only
        # complaint coming from test runners (not end users).
        # TODO: Evaluate if this code is needed at all.
        req = Require('Tests', None, 'tests', homepage="http://example.com")
        assert req.format is None
        assert req.attribute is None
        assert req.requested_version is None
        assert req.full_name() == 'Tests'
        assert req.homepage == 'http://example.com'

        from setuptools.tests import __path__

        paths = [os.path.dirname(p) for p in __path__]
        assert req.is_present(paths)
        assert req.is_current(paths)
class TestDistro:
    """Tests for Distribution include/exclude bookkeeping."""

    def setup_method(self, method):
        # Two extensions plus a small package tree exercised by the tests.
        self.e1 = Extension('bar.ext', ['bar.c'])
        self.e2 = Extension('c.y', ['y.c'])
        self.dist = makeSetup(
            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
            py_modules=['b.d', 'x'],
            ext_modules=(self.e1, self.e2),
            package_dir={},
        )

    def testDistroType(self):
        assert isinstance(self.dist, setuptools.dist.Distribution)

    def testExcludePackage(self):
        # Excluding a package drops it and all its subpackages/modules/exts.
        self.dist.exclude_package('a')
        assert self.dist.packages == ['b', 'c']
        self.dist.exclude_package('b')
        assert self.dist.packages == ['c']
        assert self.dist.py_modules == ['x']
        assert self.dist.ext_modules == [self.e1, self.e2]
        self.dist.exclude_package('c')
        assert self.dist.packages == []
        assert self.dist.py_modules == ['x']
        assert self.dist.ext_modules == [self.e1]
        # test removals from unspecified options
        makeSetup().exclude_package('x')

    def testIncludeExclude(self):
        # remove an extension
        self.dist.exclude(ext_modules=[self.e1])
        assert self.dist.ext_modules == [self.e2]
        # add it back in
        self.dist.include(ext_modules=[self.e1])
        assert self.dist.ext_modules == [self.e2, self.e1]
        # should not add duplicate
        self.dist.include(ext_modules=[self.e1])
        assert self.dist.ext_modules == [self.e2, self.e1]

    def testExcludePackages(self):
        self.dist.exclude(packages=['c', 'b', 'a'])
        assert self.dist.packages == []
        assert self.dist.py_modules == ['x']
        assert self.dist.ext_modules == [self.e1]

    def testEmpty(self):
        # include/exclude on a distribution with no options must not error.
        dist = makeSetup()
        dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
        dist = makeSetup()
        dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])

    def testContents(self):
        assert self.dist.has_contents_for('a')
        self.dist.exclude_package('a')
        assert not self.dist.has_contents_for('a')
        assert self.dist.has_contents_for('b')
        self.dist.exclude_package('b')
        assert not self.dist.has_contents_for('b')
        assert self.dist.has_contents_for('c')
        self.dist.exclude_package('c')
        assert not self.dist.has_contents_for('c')

    def testInvalidIncludeExclude(self):
        # Unknown options and wrong container types must raise.
        with pytest.raises(DistutilsSetupError):
            self.dist.include(nonexistent_option='x')
        with pytest.raises(DistutilsSetupError):
            self.dist.exclude(nonexistent_option='x')
        with pytest.raises(DistutilsSetupError):
            self.dist.include(packages={'x': 'y'})
        with pytest.raises(DistutilsSetupError):
            self.dist.exclude(packages={'x': 'y'})
        with pytest.raises(DistutilsSetupError):
            self.dist.include(ext_modules={'x': 'y'})
        with pytest.raises(DistutilsSetupError):
            self.dist.exclude(ext_modules={'x': 'y'})
        with pytest.raises(DistutilsSetupError):
            self.dist.include(package_dir=['q'])
        with pytest.raises(DistutilsSetupError):
            self.dist.exclude(package_dir=['q'])
@pytest.fixture
def example_source(tmpdir):
    """Small project tree: one package file and one root-level text file."""
    tmpdir.mkdir('foo')
    for relative_name in ('foo/bar.py', 'readme.txt'):
        (tmpdir / relative_name).write('')
    return tmpdir
def test_findall(example_source):
    """findall must report every file below the given root."""
    found = list(setuptools.findall(str(example_source)))
    wanted = [example_source.join(fn) for fn in ('readme.txt', 'foo/bar.py')]
    assert found == wanted
def test_findall_curdir(example_source):
    """Without an argument, findall walks the current working directory."""
    with example_source.as_cwd():
        found = list(setuptools.findall())
    wanted = ['readme.txt', os.path.join('foo', 'bar.py')]
    assert found == wanted
@pytest.fixture
def can_symlink(tmpdir):
    """
    Skip the test if the environment cannot create a symbolic link.

    The probe link is created inside ``tmpdir``; previously it was created
    in the process's current working directory, which may be read-only or
    shared with other tests.
    """
    link_fn = str(tmpdir.join('link'))
    target_fn = 'target'
    try:
        os.symlink(target_fn, link_fn)
    except (OSError, NotImplementedError, AttributeError):
        # e.g. Windows without the symlink privilege, or platforms lacking
        # os.symlink entirely.
        pytest.skip("Cannot create symbolic links")
    os.remove(link_fn)
@pytest.mark.usefixtures("can_symlink")
def test_findall_missing_symlink(tmpdir):
    """A dangling symlink must be silently ignored by findall."""
    with tmpdir.as_cwd():
        # 'foo' does not exist, so 'bar' is a broken (dangling) symlink.
        os.symlink('foo', 'bar')
        found = list(setuptools.findall())
        assert found == []
@pytest.mark.xfail(reason="unable to exclude tests; #4475 #3260")
def test_its_own_wheel_does_not_contain_tests(setuptools_wheel):
    """No path inside the built wheel should contain a /tests/ component."""
    with ZipFile(setuptools_wheel) as zipfile:
        members = [name.replace(os.sep, '/') for name in zipfile.namelist()]
    for member in members:
        assert '/tests/' not in member
def test_wheel_includes_cli_scripts(setuptools_wheel):
    """The wheel must ship the Windows command-line launcher cli-64.exe."""
    with ZipFile(setuptools_wheel) as zipfile:
        names = (entry.replace(os.sep, '/') for entry in zipfile.namelist())
        assert any('cli-64.exe' in name for name in names)
def test_wheel_includes_vendored_metadata(setuptools_wheel):
    """Vendored dependencies must keep their *.dist-info/METADATA files."""
    metadata_pattern = re.compile(r'_vendor/.*\.dist-info/METADATA')
    with ZipFile(setuptools_wheel) as zipfile:
        names = [entry.replace(os.sep, '/') for entry in zipfile.namelist()]
    assert any(metadata_pattern.search(name) for name in names)

View File

@@ -0,0 +1,23 @@
import stat
import sys
from unittest.mock import Mock
from setuptools import _shutil
def test_rmtree_readonly(monkeypatch, tmp_path):
    """
    rmtree must delete a tree containing a read-only file.

    On Windows the read-only bit blocks deletion, so the chmod fallback
    (``attempt_chmod_verbose``) must fire exactly once; elsewhere the
    unlink succeeds directly and the fallback must not be used.
    """
    tmp_dir = tmp_path / "with_readonly"
    tmp_dir.mkdir()
    some_file = tmp_dir.joinpath("file.txt")
    some_file.touch()
    some_file.chmod(stat.S_IREAD)

    # Only Windows refuses to unlink read-only files.
    expected_count = 1 if sys.platform.startswith("win") else 0
    chmod_fn = Mock(wraps=_shutil.attempt_chmod_verbose)
    monkeypatch.setattr(_shutil, "attempt_chmod_verbose", chmod_fn)

    _shutil.rmtree(tmp_dir)
    assert chmod_fn.call_count == expected_count
    assert not tmp_dir.is_dir()

View File

@@ -0,0 +1,10 @@
from setuptools import unicode_utils
def test_filesys_decode_fs_encoding_is_None(monkeypatch):
    """
    filesys_decode must not raise TypeError when
    ``sys.getfilesystemencoding()`` returns None.
    """

    def no_encoding():
        return None

    monkeypatch.setattr('sys.getfilesystemencoding', no_encoding)
    # Must fall back to some decoding strategy instead of raising.
    unicode_utils.filesys_decode(b'test')

View File

@@ -0,0 +1,113 @@
import os
import subprocess
import sys
from urllib.error import URLError
from urllib.request import urlopen
import pytest
@pytest.fixture(autouse=True)
def pytest_virtualenv_works(venv):
    """
    pytest_virtualenv may not work. if it doesn't, skip these
    tests. See #1284.
    """
    # If the venv's interpreter reports the same prefix as the host
    # interpreter, the virtualenv provided no isolation and every test in
    # this module would be meaningless.
    venv_prefix = venv.run(["python", "-c", "import sys; print(sys.prefix)"]).strip()
    if venv_prefix == sys.prefix:
        pytest.skip("virtualenv is broken (see pypa/setuptools#1284)")
def test_clean_env_install(venv_without_setuptools, setuptools_wheel):
    """
    Check setuptools can be installed in a clean environment.
    """
    venv_without_setuptools.run(
        ["python", "-m", "pip", "install", str(setuptools_wheel)]
    )
def access_pypi():
    """
    Return True when the test run may rely on reaching PyPI.

    When the NETWORK_REQUIRED environment variable is set, connectivity is
    assumed (CI guarantees it); otherwise a one-second probe against
    https://pypi.org decides.
    """
    # Detect if tests are being run without connectivity
    if not os.environ.get('NETWORK_REQUIRED', False):  # pragma: nocover
        try:
            # Close the response explicitly: the previous bare ``urlopen``
            # call leaked the underlying socket.
            with urlopen('https://pypi.org', timeout=1):
                pass
        except URLError:
            # No network, disable most of these tests
            return False

    return True
@pytest.mark.skipif(
    'platform.python_implementation() == "PyPy"',
    reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995",
)
@pytest.mark.skipif(not access_pypi(), reason="no network")
# ^-- Even when it is not necessary to install a different version of `pip`
# the build process will still try to download `wheel`, see #3147 and #2986.
@pytest.mark.parametrize(
    'pip_version',
    [
        # None means "use whatever pip the venv already has".
        None,
        pytest.param(
            'pip<20.1',
            marks=pytest.mark.xfail(
                'sys.version_info >= (3, 12)',
                reason="pip 23.1.2 required for Python 3.12 and later",
            ),
        ),
        pytest.param(
            'pip<21',
            marks=pytest.mark.xfail(
                'sys.version_info >= (3, 12)',
                reason="pip 23.1.2 required for Python 3.12 and later",
            ),
        ),
        pytest.param(
            'pip<22',
            marks=pytest.mark.xfail(
                'sys.version_info >= (3, 12)',
                reason="pip 23.1.2 required for Python 3.12 and later",
            ),
        ),
        pytest.param(
            'pip<23',
            marks=pytest.mark.xfail(
                'sys.version_info >= (3, 12)',
                reason="pip 23.1.2 required for Python 3.12 and later",
            ),
        ),
        pytest.param(
            'https://github.com/pypa/pip/archive/main.zip',
            marks=pytest.mark.xfail(reason='#2975'),
        ),
    ],
)
def test_pip_upgrade_from_source(
    pip_version, venv_without_setuptools, setuptools_wheel, setuptools_sdist
):
    """
    Check pip can upgrade setuptools from source.
    """
    # Install pip/wheel, in a venv without setuptools (as it
    # should not be needed for bootstrapping from source)
    venv = venv_without_setuptools
    venv.run(["pip", "install", "-U", "wheel"])
    if pip_version is not None:
        venv.run(["python", "-m", "pip", "install", "-U", pip_version, "--retries=1"])
    with pytest.raises(subprocess.CalledProcessError):
        # Meta-test to make sure setuptools is not installed
        venv.run(["python", "-c", "import setuptools"])

    # Then install from wheel.
    venv.run(["pip", "install", str(setuptools_wheel)])
    # And finally try to upgrade from source.
    venv.run(["pip", "install", "--no-cache-dir", "--upgrade", str(setuptools_sdist)])
def test_no_missing_dependencies(bare_venv, request):
    """
    Quick and dirty test to ensure all external dependencies are vendored.
    """
    project_root = request.config.rootdir
    # `setup.py --help` imports setuptools, which pulls in every vendored dep.
    bare_venv.run(['python', 'setup.py', '--help'], cwd=project_root)

View File

@@ -0,0 +1,106 @@
from inspect import cleandoc
import pytest
from setuptools.warnings import SetuptoolsDeprecationWarning, SetuptoolsWarning
_EXAMPLES = {
"default": dict(
args=("Hello {x}", "\n\t{target} {v:.1f}"),
kwargs={"x": 5, "v": 3, "target": "World"},
expected="""
Hello 5
!!
********************************************************************************
World 3.0
********************************************************************************
!!
""",
),
"futue_due_date": dict(
args=("Summary", "Lorem ipsum"),
kwargs={"due_date": (9999, 11, 22)},
expected="""
Summary
!!
********************************************************************************
Lorem ipsum
By 9999-Nov-22, you need to update your project and remove deprecated calls
or your builds will no longer be supported.
********************************************************************************
!!
""",
),
"past_due_date_with_docs": dict(
args=("Summary", "Lorem ipsum"),
kwargs={"due_date": (2000, 11, 22), "see_docs": "some_page.html"},
expected="""
Summary
!!
********************************************************************************
Lorem ipsum
This deprecation is overdue, please update your project and remove deprecated
calls to avoid build errors in the future.
See https://setuptools.pypa.io/en/latest/some_page.html for details.
********************************************************************************
!!
""",
),
}
@pytest.mark.parametrize("example_name", _EXAMPLES.keys())
def test_formatting(monkeypatch, example_name):
    """
    It should automatically handle indentation, interpolation and things like due date.
    """
    example = _EXAMPLES[example_name]
    monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "false")

    with pytest.warns(SetuptoolsWarning) as warn_info:
        SetuptoolsWarning.emit(*example["args"], **example["kwargs"])

    assert _get_message(warn_info) == cleandoc(example["expected"])
def test_due_date_enforcement(monkeypatch):
    """With SETUPTOOLS_ENFORCE_DEPRECATION set, an overdue warning must raise."""

    class _MyDeprecation(SetuptoolsDeprecationWarning):
        _SUMMARY = "Summary"
        _DETAILS = "Lorem ipsum"
        _DUE_DATE = (2000, 11, 22)  # long past, so enforcement triggers
        _SEE_DOCS = "some_page.html"

    monkeypatch.setenv("SETUPTOOLS_ENFORCE_DEPRECATION", "true")
    with pytest.raises(SetuptoolsDeprecationWarning) as exc_info:
        _MyDeprecation.emit()

    expected = """
    Summary
    !!
    ********************************************************************************
    Lorem ipsum
    This deprecation is overdue, please update your project and remove deprecated
    calls to avoid build errors in the future.
    See https://setuptools.pypa.io/en/latest/some_page.html for details.
    ********************************************************************************
    !!
    """
    assert str(exc_info.value) == cleandoc(expected)
def _get_message(warn_info):
return next(warn.message.args[0] for warn in warn_info)

View File

@@ -0,0 +1,690 @@
"""wheel tests"""
from __future__ import annotations
import contextlib
import glob
import inspect
import os
import pathlib
import stat
import subprocess
import sys
import sysconfig
import zipfile
from typing import Any
import pytest
from jaraco import path
from packaging.tags import parse_tag
from setuptools._importlib import metadata
from setuptools.wheel import Wheel
from .contexts import tempdir
from .textwrap import DALS
from distutils.sysconfig import get_config_var
from distutils.util import get_platform
# (filename, expected) cases for ``Wheel`` filename parsing.  ``expected``
# is either an exception type (invalid name) or a dict of parsed attributes.
WHEEL_INFO_TESTS = (
    ('invalid.whl', ValueError),
    (
        'simplewheel-2.0-1-py2.py3-none-any.whl',
        {
            'project_name': 'simplewheel',
            'version': '2.0',
            'build': '1',
            'py_version': 'py2.py3',
            'abi': 'none',
            'platform': 'any',
        },
    ),
    (
        'simple.dist-0.1-py2.py3-none-any.whl',
        {
            'project_name': 'simple.dist',
            'version': '0.1',
            'build': None,
            'py_version': 'py2.py3',
            'abi': 'none',
            'platform': 'any',
        },
    ),
    (
        'example_pkg_a-1-py3-none-any.whl',
        {
            'project_name': 'example_pkg_a',
            'version': '1',
            'build': None,
            'py_version': 'py3',
            'abi': 'none',
            'platform': 'any',
        },
    ),
    (
        # Build tag plus multi-interpreter compressed tags (PEP 427/425).
        'PyQt5-5.9-5.9.1-cp35.cp36.cp37-abi3-manylinux1_x86_64.whl',
        {
            'project_name': 'PyQt5',
            'version': '5.9',
            'build': '5.9.1',
            'py_version': 'cp35.cp36.cp37',
            'abi': 'abi3',
            'platform': 'manylinux1_x86_64',
        },
    ),
)
@pytest.mark.parametrize(
    ('filename', 'info'), WHEEL_INFO_TESTS, ids=[t[0] for t in WHEEL_INFO_TESTS]
)
def test_wheel_info(filename, info):
    """Wheel() parses the filename, or raises when ``info`` is an exception."""
    if inspect.isclass(info):
        with pytest.raises(info):
            Wheel(filename)
        return
    parsed = Wheel(filename)
    actual = {attr: getattr(parsed, attr) for attr in info}
    assert actual == info
@contextlib.contextmanager
def build_wheel(extra_file_defs=None, **kwargs):
    """
    Build a wheel in a temporary project directory and yield its path.

    ``kwargs`` become the ``setup()`` keywords of a generated ``setup.py``;
    ``extra_file_defs`` adds more files to the project tree.
    """
    file_defs = {
        'setup.py': (
            DALS(
                """
                # -*- coding: utf-8 -*-
                from setuptools import setup
                import setuptools
                setup(**%r)
                """
            )
            % kwargs
        ).encode('utf-8'),
    }
    if extra_file_defs:
        file_defs.update(extra_file_defs)
    with tempdir() as source_dir:
        path.build(file_defs, source_dir)
        # Build out-of-process so the generated setup.py is honored as-is.
        subprocess.check_call(
            (sys.executable, 'setup.py', '-q', 'bdist_wheel'), cwd=source_dir
        )
        # Exactly one wheel is expected in dist/.
        yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]
def tree_set(root):
    """Return the set of all files under *root* as root-relative paths."""
    return {
        os.path.join(os.path.relpath(parent, root), name)
        for parent, _dirs, files in os.walk(root)
        for name in files
    }
def flatten_tree(tree):
    """Flatten nested dicts and lists into a full set of joined paths."""
    flat = set()
    for prefix, children in tree.items():
        # A dict value describes a subtree; flatten it into leaf paths first.
        if isinstance(children, dict):
            children = flatten_tree(children)
        for child in children:
            leaves = flatten_tree(child) if isinstance(child, dict) else [child]
            flat.update(os.path.join(prefix, leaf) for leaf in leaves)
    return flat
def format_install_tree(tree):
    """Interpolate py_version/platform/shlib_ext placeholders in each path."""
    values = dict(
        py_version=sysconfig.get_python_version(),
        platform=get_platform(),
        shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO'),
    )
    return {entry.format(**values) for entry in tree}
def _check_wheel_install(
    filename, install_dir, install_tree_includes, project_name, version, requires_txt
):
    """
    Install wheel ``filename`` as an egg under ``install_dir`` and verify it.

    ``install_tree_includes`` (when not None) is a set of expected relative
    paths that must be a subset of what actually lands on disk;
    ``requires_txt`` is the expected content of EGG-INFO/requires.txt.
    """
    w = Wheel(filename)
    egg_path = os.path.join(install_dir, w.egg_name())
    w.install_as_egg(egg_path)
    if install_tree_includes is not None:
        install_tree = format_install_tree(install_tree_includes)
        exp = tree_set(install_dir)
        assert install_tree.issubset(exp), install_tree - exp

    (dist,) = metadata.Distribution.discover(path=[egg_path])
    # pyright is nitpicky; fine to assume dist.metadata.__getitem__ will fail or return None
    # (https://github.com/pypa/setuptools/pull/5006#issuecomment-2894774288)
    assert dist.metadata['Name'] == project_name  # pyright: ignore # noqa: PGH003
    assert dist.metadata['Version'] == version  # pyright: ignore # noqa: PGH003
    assert dist.read_text('requires.txt') == requires_txt
class Record:
    """Placeholder whose repr renders as a constructor call, so it can be
    embedded via %r into a generated setup.py."""

    def __init__(self, id, **kwargs):
        self._id = id
        self._fields = kwargs

    def __repr__(self) -> str:
        return '{}(**{!r})'.format(self._id, self._fields)
# Using Any to avoid possible type union issues later in test
# making a TypedDict is not worth in a test and anonymous/inline TypedDict are experimental
# https://github.com/python/mypy/issues/9884
WHEEL_INSTALL_TESTS: tuple[dict[str, Any], ...] = (
dict(
id='basic',
file_defs={'foo': {'__init__.py': ''}},
setup_kwargs=dict(
packages=['foo'],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': {
'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'],
'foo': ['__init__.py'],
}
}),
),
dict(
id='utf-8',
setup_kwargs=dict(
description='Description accentuée',
),
),
dict(
id='data',
file_defs={
'data.txt': DALS(
"""
Some data...
"""
),
},
setup_kwargs=dict(
data_files=[('data_dir', ['data.txt'])],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': {
'EGG-INFO': ['PKG-INFO', 'RECORD', 'WHEEL', 'top_level.txt'],
'data_dir': ['data.txt'],
}
}),
),
dict(
id='extension',
file_defs={
'extension.c': DALS(
"""
#include "Python.h"
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"extension",
NULL,
0,
NULL,
NULL,
NULL,
NULL,
NULL
};
#define INITERROR return NULL
PyMODINIT_FUNC PyInit_extension(void)
#else
#define INITERROR return
void initextension(void)
#endif
{
#if PY_MAJOR_VERSION >= 3
PyObject *module = PyModule_Create(&moduledef);
#else
PyObject *module = Py_InitModule("extension", NULL);
#endif
if (module == NULL)
INITERROR;
#if PY_MAJOR_VERSION >= 3
return module;
#endif
}
"""
),
},
setup_kwargs=dict(
ext_modules=[
Record(
'setuptools.Extension', name='extension', sources=['extension.c']
)
],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}-{platform}.egg': [
'extension{shlib_ext}',
{
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'top_level.txt',
]
},
]
}),
),
dict(
id='header',
file_defs={
'header.h': DALS(
"""
"""
),
},
setup_kwargs=dict(
headers=['header.h'],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': [
'header.h',
{
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'top_level.txt',
]
},
]
}),
),
dict(
id='script',
file_defs={
'script.py': DALS(
"""
#/usr/bin/python
print('hello world!')
"""
),
'script.sh': DALS(
"""
#/bin/sh
echo 'hello world!'
"""
),
},
setup_kwargs=dict(
scripts=['script.py', 'script.sh'],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': {
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'top_level.txt',
{'scripts': ['script.py', 'script.sh']},
]
}
}),
),
dict(
id='requires1',
install_requires='foobar==2.0',
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': {
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'requires.txt',
'top_level.txt',
]
}
}),
requires_txt=DALS(
"""
foobar==2.0
"""
),
),
dict(
id='requires2',
install_requires=f"""
bar
foo<=2.0; {sys.platform!r} in sys_platform
""",
requires_txt=DALS(
"""
bar
foo<=2.0
"""
),
),
dict(
id='requires3',
install_requires=f"""
bar; {sys.platform!r} != sys_platform
""",
),
dict(
id='requires4',
install_requires="""
foo
""",
extras_require={
'extra': 'foobar>3',
},
requires_txt=DALS(
"""
foo
[extra]
foobar>3
"""
),
),
dict(
id='requires5',
extras_require={
'extra': f'foobar; {sys.platform!r} != sys_platform',
},
requires_txt='\n'
+ DALS(
"""
[extra]
"""
),
),
dict(
id='requires_ensure_order',
install_requires="""
foo
bar
baz
qux
""",
extras_require={
'extra': """
foobar>3
barbaz>4
bazqux>5
quxzap>6
""",
},
requires_txt=DALS(
"""
foo
bar
baz
qux
[extra]
foobar>3
barbaz>4
bazqux>5
quxzap>6
"""
),
),
dict(
id='namespace_package',
file_defs={
'foo': {
'bar': {'__init__.py': ''},
},
},
setup_kwargs=dict(
namespace_packages=['foo'],
packages=['foo.bar'],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': [
'foo-1.0-py{py_version}-nspkg.pth',
{
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'namespace_packages.txt',
'top_level.txt',
]
},
{
'foo': [
'__init__.py',
{'bar': ['__init__.py']},
]
},
]
}),
),
dict(
id='empty_namespace_package',
file_defs={
'foobar': {
'__init__.py': (
"__import__('pkg_resources').declare_namespace(__name__)"
)
},
},
setup_kwargs=dict(
namespace_packages=['foobar'],
packages=['foobar'],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': [
'foo-1.0-py{py_version}-nspkg.pth',
{
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'namespace_packages.txt',
'top_level.txt',
]
},
{
'foobar': [
'__init__.py',
]
},
]
}),
),
dict(
id='data_in_package',
file_defs={
'foo': {
'__init__.py': '',
'data_dir': {
'data.txt': DALS(
"""
Some data...
"""
),
},
}
},
setup_kwargs=dict(
packages=['foo'],
data_files=[('foo/data_dir', ['foo/data_dir/data.txt'])],
),
install_tree=flatten_tree({
'foo-1.0-py{py_version}.egg': {
'EGG-INFO': [
'PKG-INFO',
'RECORD',
'WHEEL',
'top_level.txt',
],
'foo': [
'__init__.py',
{
'data_dir': [
'data.txt',
]
},
],
}
}),
),
)
@pytest.mark.parametrize(
    'params',
    WHEEL_INSTALL_TESTS,
    ids=[params['id'] for params in WHEEL_INSTALL_TESTS],
)
def test_wheel_install(params):
    """Build a wheel per ``params``, install it as an egg, verify the tree."""
    project_name = params.get('name', 'foo')
    version = params.get('version', '1.0')
    install_requires = params.get('install_requires', [])
    extras_require = params.get('extras_require', {})
    requires_txt = params.get('requires_txt', None)
    install_tree = params.get('install_tree')
    file_defs = params.get('file_defs', {})
    setup_kwargs = params.get('setup_kwargs', {})

    with (
        build_wheel(
            name=project_name,
            version=version,
            install_requires=install_requires,
            extras_require=extras_require,
            extra_file_defs=file_defs,
            **setup_kwargs,
        ) as filename,
        tempdir() as install_dir,
    ):
        _check_wheel_install(
            filename, install_dir, install_tree, project_name, version, requires_txt
        )
def test_wheel_no_dist_dir():
    """Installing a wheel that lacks a ``*.dist-info`` directory must fail."""
    name, version = 'nodistinfo', '1.0'
    wheel_name = f'{name}-{version}-py2.py3-none-any.whl'
    with tempdir() as source_dir, tempdir() as install_dir:
        wheel_path = os.path.join(source_dir, wheel_name)
        # An empty zip archive: no metadata whatsoever.
        with zipfile.ZipFile(wheel_path, 'w'):
            pass
        with pytest.raises(ValueError):
            _check_wheel_install(wheel_path, install_dir, None, name, version, None)
def test_wheel_is_compatible(monkeypatch):
    """A wheel whose tags match the (patched) supported set is compatible."""

    def fake_supported_tags():
        tags = parse_tag('cp36-cp36m-manylinux1_x86_64')
        return {(t.interpreter, t.abi, t.platform) for t in tags}

    monkeypatch.setattr('setuptools.wheel._get_supported_tags', fake_supported_tags)
    wheel = Wheel('onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl')
    assert wheel.is_compatible()
def test_wheel_mode():
    """File modes (an executable script.sh) must survive wheel → egg install."""

    @contextlib.contextmanager
    def build_wheel(extra_file_defs=None, **kwargs):
        # Local variant of the module-level build_wheel: it additionally
        # chmods script.sh to 0o777 before building, so the produced wheel
        # records the executable bit we later assert on.
        file_defs = {
            'setup.py': (
                DALS(
                    """
                    # -*- coding: utf-8 -*-
                    from setuptools import setup
                    import setuptools
                    setup(**%r)
                    """
                )
                % kwargs
            ).encode('utf-8'),
        }
        if extra_file_defs:
            file_defs.update(extra_file_defs)
        with tempdir() as source_dir:
            path.build(file_defs, source_dir)
            runsh = pathlib.Path(source_dir) / "script.sh"
            os.chmod(runsh, 0o777)
            subprocess.check_call(
                (sys.executable, 'setup.py', '-q', 'bdist_wheel'), cwd=source_dir
            )
            yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0]

    params = dict(
        id='script',
        file_defs={
            'script.py': DALS(
                """
                #/usr/bin/python
                print('hello world!')
                """
            ),
            'script.sh': DALS(
                """
                #/bin/sh
                echo 'hello world!'
                """
            ),
        },
        setup_kwargs=dict(
            scripts=['script.py', 'script.sh'],
        ),
        install_tree=flatten_tree({
            'foo-1.0-py{py_version}.egg': {
                'EGG-INFO': [
                    'PKG-INFO',
                    'RECORD',
                    'WHEEL',
                    'top_level.txt',
                    {'scripts': ['script.py', 'script.sh']},
                ]
            }
        }),
    )

    project_name = params.get('name', 'foo')
    version = params.get('version', '1.0')
    install_tree = params.get('install_tree')
    file_defs = params.get('file_defs', {})
    setup_kwargs = params.get('setup_kwargs', {})
    with (
        build_wheel(
            name=project_name,
            version=version,
            install_requires=[],
            extras_require={},
            extra_file_defs=file_defs,
            **setup_kwargs,
        ) as filename,
        tempdir() as install_dir,
    ):
        _check_wheel_install(
            filename, install_dir, install_tree, project_name, version, None
        )
        w = Wheel(filename)
        base = pathlib.Path(install_dir) / w.egg_name()
        script_sh = base / "EGG-INFO" / "scripts" / "script.sh"
        assert script_sh.exists()
        if sys.platform != 'win32':
            # Editable file mode has no effect on Windows
            assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777"

View File

@@ -0,0 +1,258 @@
"""
Python Script Wrapper for Windows
=================================
setuptools includes wrappers for Python scripts that allows them to be
executed like regular windows programs. There are 2 wrappers, one
for command-line programs, cli.exe, and one for graphical programs,
gui.exe. These programs are almost identical, function pretty much
the same way, and are generated from the same source file. The
wrapper programs are used by copying them to the directory containing
the script they are to wrap and with the same name as the script they
are to wrap.
"""
import pathlib
import platform
import subprocess
import sys
import textwrap
import pytest
from setuptools._importlib import resources
pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only")
class WrapperTester:
    """Shared machinery for the cli/gui launcher tests below.

    Subclasses define ``script_name``, ``wrapper_name``, ``wrapper_source``
    and ``script_tmpl``.
    """

    @classmethod
    def prep_script(cls, template):
        # ``template % locals()`` interpolates %(python_exe)s with the
        # properly quoted path of the running interpreter.
        python_exe = subprocess.list2cmdline([sys.executable])
        return template % locals()

    @classmethod
    def create_script(cls, tmpdir):
        """
        Create a simple script, foo-script.py

        Note that the script starts with a Unix-style '#!' line saying which
        Python executable to run. The wrapper will use this line to find the
        correct Python executable.
        """
        script = cls.prep_script(cls.script_tmpl)
        with (tmpdir / cls.script_name).open('w') as f:
            f.write(script)

        # also copy cli.exe to the sample directory
        with (tmpdir / cls.wrapper_name).open('wb') as f:
            w = resources.files('setuptools').joinpath(cls.wrapper_source).read_bytes()
            f.write(w)
def win_launcher_exe(prefix):
    """A simple routine to select launcher script based on platform."""
    assert prefix in ('cli', 'gui')
    suffix = "arm64" if platform.machine() == "ARM64" else "32"
    return f"{prefix}-{suffix}.exe"
class TestCLI(WrapperTester):
    """Tests for the command-line (console) launcher wrapper."""

    script_name = 'foo-script.py'
    wrapper_name = 'foo.exe'
    wrapper_source = win_launcher_exe('cli')
    script_tmpl = textwrap.dedent(
        """
        #!%(python_exe)s
        import sys
        input = repr(sys.stdin.read())
        print(sys.argv[0][-14:])
        print(sys.argv[1:])
        print(input)
        if __debug__:
            print('non-optimized')
        """
    ).lstrip()

    def test_basic(self, tmpdir):
        """
        When the copy of cli.exe, foo.exe in this example, runs, it examines
        the path name it was run with and computes a Python script path name
        by removing the '.exe' suffix and adding the '-script.py' suffix. (For
        GUI programs, the suffix '-script.pyw' is added.) This is why we
        named our script the way we did. Now we can run our script by running
        the wrapper:

        This example was a little pathological in that it exercised windows
        (MS C runtime) quoting rules:

        - Strings containing spaces are surrounded by double quotes.

        - Double quotes in strings need to be escaped by preceding them with
          back slashes.

        - One or more backslashes preceding double quotes need to be escaped
          by preceding each of them with back slashes.
        """
        self.create_script(tmpdir)
        cmd = [
            str(tmpdir / 'foo.exe'),
            'arg1',
            'arg 2',
            'arg "2\\"',
            'arg 4\\',
            'arg5 a\\\\b',
        ]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            text=True,
            encoding="utf-8",
        )
        stdout, _stderr = proc.communicate('hello\nworld\n')
        actual = stdout.replace('\r\n', '\n')
        expected = textwrap.dedent(
            r"""
            \foo-script.py
            ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
            'hello\nworld\n'
            non-optimized
            """
        ).lstrip()
        assert actual == expected

    def test_symlink(self, tmpdir):
        """
        Ensure that symlink for the foo.exe is working correctly.
        """
        script_dir = tmpdir / "script_dir"
        script_dir.mkdir()
        self.create_script(script_dir)
        symlink = pathlib.Path(tmpdir / "foo.exe")
        symlink.symlink_to(script_dir / "foo.exe")

        cmd = [
            str(tmpdir / 'foo.exe'),
            'arg1',
            'arg 2',
            'arg "2\\"',
            'arg 4\\',
            'arg5 a\\\\b',
        ]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            text=True,
            encoding="utf-8",
        )
        stdout, _stderr = proc.communicate('hello\nworld\n')
        actual = stdout.replace('\r\n', '\n')
        expected = textwrap.dedent(
            r"""
            \foo-script.py
            ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b']
            'hello\nworld\n'
            non-optimized
            """
        ).lstrip()
        assert actual == expected

    def test_with_options(self, tmpdir):
        """
        Specifying Python Command-line Options
        --------------------------------------
        You can specify a single argument on the '#!' line. This can be used
        to specify Python options like -O, to run in optimized mode or -i
        to start the interactive interpreter. You can combine multiple
        options as usual. For example, to run in optimized mode and
        enter the interpreter after running the script, you could use -Oi:
        """
        self.create_script(tmpdir)
        tmpl = textwrap.dedent(
            """
            #!%(python_exe)s -Oi
            import sys
            input = repr(sys.stdin.read())
            print(sys.argv[0][-14:])
            print(sys.argv[1:])
            print(input)
            if __debug__:
                print('non-optimized')
            sys.ps1 = '---'
            """
        ).lstrip()
        with (tmpdir / 'foo-script.py').open('w') as f:
            f.write(self.prep_script(tmpl))
        cmd = [str(tmpdir / 'foo.exe')]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            encoding="utf-8",
        )
        stdout, _stderr = proc.communicate()
        actual = stdout.replace('\r\n', '\n')
        # -O disables __debug__, so 'non-optimized' must not appear; -i
        # starts the interactive prompt, printing the patched sys.ps1.
        expected = textwrap.dedent(
            r"""
            \foo-script.py
            []
            ''
            ---
            """
        ).lstrip()
        assert actual == expected
class TestGUI(WrapperTester):
    """
    Testing the GUI Version
    -----------------------
    """

    script_name = 'bar-script.pyw'
    wrapper_source = win_launcher_exe('gui')
    wrapper_name = 'bar.exe'

    # GUI scripts have no console, so results are written to a file instead
    # of stdout.
    script_tmpl = textwrap.dedent(
        """
        #!%(python_exe)s
        import sys
        f = open(sys.argv[1], 'wb')
        bytes_written = f.write(repr(sys.argv[2]).encode('utf-8'))
        f.close()
        """
    ).strip()

    def test_basic(self, tmpdir):
        """Test the GUI version with the simple script, bar-script.py"""
        self.create_script(tmpdir)

        cmd = [
            str(tmpdir / 'bar.exe'),
            str(tmpdir / 'test_output.txt'),
            'Test Argument',
        ]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            encoding="utf-8",
        )
        stdout, stderr = proc.communicate()
        # A GUI launcher must not write anything to the console streams.
        assert not stdout
        assert not stderr

        with (tmpdir / 'test_output.txt').open('rb') as f_out:
            actual = f_out.read().decode('ascii')
        assert actual == repr('Test Argument')

View File

@@ -0,0 +1,4 @@
class Filenames:
    # A sample non-ASCII filename plus its byte forms in two common
    # encodings, for tests exercising filename encoding/decoding.
    unicode = 'smörbröd.py'
    latin_1 = unicode.encode('latin-1')
    utf_8 = unicode.encode('utf-8')

View File

@@ -0,0 +1,6 @@
import textwrap
def DALS(s):
    """Dedent And Left-Strip: normalize an indented triple-quoted literal."""
    dedented = textwrap.dedent(s)
    return dedented.lstrip()