fix
@@ -0,0 +1,42 @@
"""
Test suite for distutils.

Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""

import shutil
from typing import Sequence


def missing_compiler_executable(cmd_names: Sequence[str] = []):  # pragma: no cover
    """Check if the compiler components used to build the interpreter exist.

    Check for the existence of the compiler executables whose names are listed
    in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
    and return the first missing executable or None when none is found
    missing.

    """
    from distutils import ccompiler, errors, sysconfig

    compiler = ccompiler.new_compiler()
    sysconfig.customize_compiler(compiler)
    if compiler.compiler_type == "msvc":
        # MSVC has no executables, so check whether initialization succeeds
        try:
            compiler.initialize()
        except errors.DistutilsPlatformError:
            return "msvc"
    for name in compiler.executables:
        if cmd_names and name not in cmd_names:
            continue
        cmd = getattr(compiler, name)
        if cmd_names:
            assert cmd is not None, f"the '{name}' executable is not configured"
        elif not cmd:
            continue
        if shutil.which(cmd[0]) is None:
            return cmd[0]
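
# Illustrative usage sketch (hypothetical test, not part of the patch above):
# callers probe for a working toolchain first and skip when one of the
# compiler programs cannot be found on PATH.

def _example_skip_without_compiler():  # hypothetical
    import pytest

    missing = missing_compiler_executable()
    if missing is not None:
        pytest.skip(f"the {missing!r} command is not found")
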
@@ -0,0 +1,50 @@
# flake8: noqa

import contextlib
import builtins
import sys

from test.support import requires_zlib
import test.support


ModuleNotFoundError = getattr(builtins, 'ModuleNotFoundError', ImportError)

try:
    from test.support.warnings_helper import check_warnings
except (ModuleNotFoundError, ImportError):
    from test.support import check_warnings


try:
    from test.support.os_helper import (
        rmtree,
        EnvironmentVarGuard,
        unlink,
        skip_unless_symlink,
        temp_dir,
    )
except (ModuleNotFoundError, ImportError):
    from test.support import (
        rmtree,
        EnvironmentVarGuard,
        unlink,
        skip_unless_symlink,
        temp_dir,
    )


try:
    from test.support.import_helper import (
        DirsOnSysPath,
        CleanImport,
    )
except (ModuleNotFoundError, ImportError):
    from test.support import (
        DirsOnSysPath,
        CleanImport,
    )


if sys.version_info < (3, 9):
    requires_zlib = lambda: test.support.requires_zlib
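
# Illustrative usage sketch (not part of the patch above; it assumes this shim
# is the `.compat.py38` module the test files below import from): tests import
# the helpers from here so one spelling works across test.support layouts.

def _example_shim_usage():  # hypothetical
    from distutils.tests.compat.py38 import check_warnings

    with check_warnings(quiet=True):
        pass  # code expected to emit warnings goes here
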
@@ -0,0 +1,134 @@
"""Support code for distutils test cases."""

import itertools
import os
import pathlib
import shutil
import sys
import sysconfig
import tempfile
from distutils.core import Distribution

import pytest
from more_itertools import always_iterable


@pytest.mark.usefixtures('distutils_managed_tempdir')
class TempdirManager:
    """
    Mix-in class that handles temporary directories for test cases.
    """

    def mkdtemp(self):
        """Create a temporary directory that will be cleaned up.

        Returns the path of the directory.
        """
        d = tempfile.mkdtemp()
        self.tempdirs.append(d)
        return d

    def write_file(self, path, content='xxx'):
        """Writes a file in the given path.

        path can be a string or a sequence.
        """
        pathlib.Path(*always_iterable(path)).write_text(content, encoding='utf-8')

    def create_dist(self, pkg_name='foo', **kw):
        """Will generate a test environment.

        This function creates:
        - a Distribution instance using keywords
        - a temporary directory with a package structure

        It returns the package directory and the distribution
        instance.
        """
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, pkg_name)
        os.mkdir(pkg_dir)
        dist = Distribution(attrs=kw)

        return pkg_dir, dist


class DummyCommand:
    """Class to store options for retrieval via set_undefined_options()."""

    def __init__(self, **kwargs):
        vars(self).update(kwargs)

    def ensure_finalized(self):
        pass


def copy_xxmodule_c(directory):
    """Helper for tests that need the xxmodule.c source file.

    Example use:

        def test_compile(self):
            copy_xxmodule_c(self.tmpdir)
            self.assertIn('xxmodule.c', os.listdir(self.tmpdir))

    If the source file can be found, it will be copied to *directory*. If not,
    the test will be skipped. Errors during copy are not caught.
    """
    shutil.copy(_get_xxmodule_path(), os.path.join(directory, 'xxmodule.c'))


def _get_xxmodule_path():
    source_name = 'xxmodule.c' if sys.version_info > (3, 9) else 'xxmodule-3.8.c'
    return os.path.join(os.path.dirname(__file__), source_name)


def fixup_build_ext(cmd):
    """Function needed to make build_ext tests pass.

    When Python was built with --enable-shared on Unix, -L. is not enough to
    find libpython<blah>.so, because regrtest runs in a tempdir, not in the
    source directory where the .so lives.

    When Python was built in debug mode on Windows, build_ext commands
    need their debug attribute set, and it is not done automatically for
    some reason.

    This function handles both of these things. Example use:

        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.ensure_finalized()

    Unlike most other Unix platforms, Mac OS X embeds absolute paths
    to shared libraries into executables, so the fixup is not needed there.
    """
    if os.name == 'nt':
        cmd.debug = sys.executable.endswith('_d.exe')
    elif sysconfig.get_config_var('Py_ENABLE_SHARED'):
        # To further add to the shared builds fun on Unix, we can't just add
        # library_dirs to the Extension() instance because that doesn't get
        # plumbed through to the final compiler command.
        runshared = sysconfig.get_config_var('RUNSHARED')
        if runshared is None:
            cmd.library_dirs = ['.']
        else:
            if sys.platform == 'darwin':
                cmd.library_dirs = []
            else:
                name, equals, value = runshared.partition('=')
                cmd.library_dirs = [d for d in value.split(os.pathsep) if d]


def combine_markers(cls):
    """
    pytest will honor markers as found on the class, but when
    markers are on multiple subclasses, only one appears. Use
    this decorator to combine those markers.
    """
    cls.pytestmark = [
        mark
        for base in itertools.chain([cls], cls.__bases__)
        for mark in getattr(base, 'pytestmark', [])
    ]
    return cls
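
# Illustrative usage sketch (hypothetical test class, not part of the patch
# above): TempdirManager supplies scratch directories via the
# 'distutils_managed_tempdir' fixture, and combine_markers keeps markers from
# every base class visible to pytest.

@combine_markers
class _ExampleTests(TempdirManager):  # hypothetical
    def test_write(self):
        tmp = self.mkdtemp()  # removed by the managed-tempdir fixture
        self.write_file([tmp, 'data.txt'], 'abc')  # path may be a sequence
        assert os.path.exists(os.path.join(tmp, 'data.txt'))
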
@@ -0,0 +1,353 @@
"""Tests for distutils.archive_util."""

import functools
import operator
import os
import pathlib
import sys
import tarfile
from distutils import archive_util
from distutils.archive_util import (
    ARCHIVE_FORMATS,
    check_archive_formats,
    make_archive,
    make_tarball,
    make_zipfile,
)
from distutils.spawn import spawn
from distutils.tests import support
from os.path import splitdrive

import path
import pytest
from test.support import patch

from .unix_compat import UID_0_SUPPORT, grp, pwd, require_uid_0, require_unix_id


def can_fs_encode(filename):
    """
    Return True if the filename can be saved in the file system.
    """
    if os.path.supports_unicode_filenames:
        return True
    try:
        filename.encode(sys.getfilesystemencoding())
    except UnicodeEncodeError:
        return False
    return True


def all_equal(values):
    return functools.reduce(operator.eq, values)


def same_drive(*paths):
    return all_equal(pathlib.Path(path).drive for path in paths)


class ArchiveUtilTestCase(support.TempdirManager):
    @pytest.mark.usefixtures('needs_zlib')
    def test_make_tarball(self, name='archive'):
        # creating something to tar
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, name, '.tar.gz')
        # trying an uncompressed one
        self._make_tarball(tmpdir, name, '.tar', compress=None)

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_tarball_gzip(self):
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')

    def test_make_tarball_bzip2(self):
        pytest.importorskip('bz2')
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')

    def test_make_tarball_xz(self):
        pytest.importorskip('lzma')
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')

    @pytest.mark.skipif("not can_fs_encode('årchiv')")
    def test_make_tarball_latin1(self):
        """
        Mirror test_make_tarball, except filename contains latin characters.
        """
        self.test_make_tarball('årchiv')  # note this isn't a real word

    @pytest.mark.skipif("not can_fs_encode('のアーカイブ')")
    def test_make_tarball_extended(self):
        """
        Mirror test_make_tarball, except filename contains extended
        characters outside the latin charset.
        """
        self.test_make_tarball('のアーカイブ')  # japanese for archive

    def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
        tmpdir2 = self.mkdtemp()
        if not same_drive(tmpdir, tmpdir2):
            pytest.skip("source and target should be on same drive")

        base_name = os.path.join(tmpdir2, target_name)

        # working with relative paths to avoid tar warnings
        with path.Path(tmpdir):
            make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)

        # check if the compressed tarball was created
        tarball = base_name + suffix
        assert os.path.exists(tarball)
        assert self._tarinfo(tarball) == self._created_files

    def _tarinfo(self, path):
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return names
        finally:
            tar.close()

    _zip_created_files = [
        'dist/',
        'dist/file1',
        'dist/file2',
        'dist/sub/',
        'dist/sub/file3',
        'dist/sub2/',
    ]
    _created_files = [p.rstrip('/') for p in _zip_created_files]

    def _create_files(self):
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        return tmpdir

    @pytest.mark.usefixtures('needs_zlib')
    @pytest.mark.skipif("not (shutil.which('tar') and shutil.which('gzip'))")
    def test_tarfile_vs_tar(self):
        tmpdir = self._create_files()
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        assert os.path.exists(tarball)

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            spawn(tar_cmd)
            spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        assert os.path.exists(tarball2)
        # let's compare both tarballs
        assert self._tarinfo(tarball) == self._created_files
        assert self._tarinfo(tarball2) == self._created_files

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        assert os.path.exists(tarball)

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        assert os.path.exists(tarball)

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_zipfile(self):
        zipfile = pytest.importorskip('zipfile')
        # creating something to tar
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with path.Path(tmpdir):
            make_zipfile(base_name, 'dist')

        # check if the compressed tarball was created
        tarball = base_name + '.zip'
        assert os.path.exists(tarball)
        with zipfile.ZipFile(tarball) as zf:
            assert sorted(zf.namelist()) == self._zip_created_files

    def test_make_zipfile_no_zlib(self):
        zipfile = pytest.importorskip('zipfile')
        patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

        called = []
        zipfile_class = zipfile.ZipFile

        def fake_zipfile(*a, **kw):
            if kw.get('compression', None) == zipfile.ZIP_STORED:
                called.append((a, kw))
            return zipfile_class(*a, **kw)

        patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

        # create something to tar and compress
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with path.Path(tmpdir):
            make_zipfile(base_name, 'dist')

        tarball = base_name + '.zip'
        assert called == [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]
        assert os.path.exists(tarball)
        with zipfile.ZipFile(tarball) as zf:
            assert sorted(zf.namelist()) == self._zip_created_files

    def test_check_archive_formats(self):
        assert check_archive_formats(['gztar', 'xxx', 'zip']) == 'xxx'
        assert (
            check_archive_formats(['gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'])
            is None
        )

    def test_make_archive(self):
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        with pytest.raises(ValueError):
            make_archive(base_name, 'xxx')

    def test_make_archive_cwd(self):
        current_dir = os.getcwd()

        def _breaks(*args, **kw):
            raise RuntimeError()

        ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except Exception:
                pass
            assert os.getcwd() == current_dir
        finally:
            ARCHIVE_FORMATS.pop('xxx')

    def test_make_archive_tar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'tar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar'
        assert self._tarinfo(res) == self._created_files

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_archive_gztar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'gztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.gz'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_bztar(self):
        pytest.importorskip('bz2')
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'bztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.bz2'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_xztar(self):
        pytest.importorskip('lzma')
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'xztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.xz'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's not gid/uid support
        if UID_0_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir = self._create_files()
        root_dir = self.mkdtemp()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(
            base_name, 'zip', root_dir, base_dir, owner=owner, group=group
        )
        assert os.path.exists(res)

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        assert os.path.exists(res)

        res = make_archive(
            base_name, 'tar', root_dir, base_dir, owner=owner, group=group
        )
        assert os.path.exists(res)

        res = make_archive(
            base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh'
        )
        assert os.path.exists(res)

    @pytest.mark.usefixtures('needs_zlib')
    @require_unix_id
    @require_uid_0
    def test_tarfile_root_owner(self):
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
        try:
            archive_name = make_tarball(
                base_name, 'dist', compress=None, owner=owner, group=group
            )
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        assert os.path.exists(archive_name)

        # now checks the rights
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                assert member.uid == 0
                assert member.gid == 0
        finally:
            archive.close()
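
# Illustrative API sketch (hypothetical paths, not part of the patch above):
# make_archive() builds '<base_name>.<ext>' for the chosen format, optionally
# switching into root_dir first, and returns the path of the archive it wrote.

def _example_make_archive(tmp_dir='/tmp/demo'):  # hypothetical directory
    archive = make_archive(os.path.join(tmp_dir, 'archive'), 'gztar', root_dir=tmp_dir)
    assert archive.endswith('.tar.gz')
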
@@ -0,0 +1,47 @@
"""Tests for distutils.command.bdist."""

from distutils.command.bdist import bdist
from distutils.tests import support


class TestBuild(support.TempdirManager):
    def test_formats(self):
        # let's create a command and make sure
        # we can set the format
        dist = self.create_dist()[1]
        cmd = bdist(dist)
        cmd.formats = ['gztar']
        cmd.ensure_finalized()
        assert cmd.formats == ['gztar']

        # what formats does bdist offer?
        formats = [
            'bztar',
            'gztar',
            'rpm',
            'tar',
            'xztar',
            'zip',
            'ztar',
        ]
        found = sorted(cmd.format_commands)
        assert found == formats

    def test_skip_build(self):
        # bug #10946: bdist --skip-build should trickle down to subcommands
        dist = self.create_dist()[1]
        cmd = bdist(dist)
        cmd.skip_build = True
        cmd.ensure_finalized()
        dist.command_obj['bdist'] = cmd

        names = [
            'bdist_dumb',
        ]  # bdist_rpm does not support --skip-build

        for name in names:
            subcmd = cmd.get_finalized_command(name)
            if getattr(subcmd, '_unsupported', False):
                # command is not supported on this build
                continue
            assert subcmd.skip_build, f'{name} should take --skip-build from bdist'
@@ -0,0 +1,78 @@
"""Tests for distutils.command.bdist_dumb."""

import os
import sys
import zipfile
from distutils.command.bdist_dumb import bdist_dumb
from distutils.core import Distribution
from distutils.tests import support

import pytest

SETUP_PY = """\
from distutils.core import setup
import foo

setup(name='foo', version='0.1', py_modules=['foo'],
      url='xxx', author='xxx', author_email='xxx')

"""


@support.combine_markers
@pytest.mark.usefixtures('save_env')
@pytest.mark.usefixtures('save_argv')
@pytest.mark.usefixtures('save_cwd')
class TestBuildDumb(
    support.TempdirManager,
):
    @pytest.mark.usefixtures('needs_zlib')
    def test_simple_built(self):
        # let's create a simple package
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({
            'name': 'foo',
            'version': '0.1',
            'py_modules': ['foo'],
            'url': 'xxx',
            'author': 'xxx',
            'author_email': 'xxx',
        })
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)

        sys.argv = ['setup.py']
        cmd = bdist_dumb(dist)

        # so the output is the same no matter
        # what the platform is
        cmd.format = 'zip'

        cmd.ensure_finalized()
        cmd.run()

        # see what we have
        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        base = f"{dist.get_fullname()}.{cmd.plat_name}.zip"

        assert dist_created == [base]

        # now let's check what we have in the zip file
        fp = zipfile.ZipFile(os.path.join('dist', base))
        try:
            contents = fp.namelist()
        finally:
            fp.close()

        contents = sorted(filter(None, map(os.path.basename, contents)))
        wanted = ['foo-0.1-py{}.{}.egg-info'.format(*sys.version_info[:2]), 'foo.py']
        if not sys.dont_write_bytecode:
            wanted.append(f'foo.{sys.implementation.cache_tag}.pyc')
        assert contents == sorted(wanted)
@@ -0,0 +1,128 @@
"""Tests for distutils.command.bdist_rpm."""

import os
import shutil  # noqa: F401
import sys
from distutils.command.bdist_rpm import bdist_rpm
from distutils.core import Distribution
from distutils.tests import support

import pytest

from .compat.py38 import requires_zlib

SETUP_PY = """\
from distutils.core import setup
import foo

setup(name='foo', version='0.1', py_modules=['foo'],
      url='xxx', author='xxx', author_email='xxx')

"""


@pytest.fixture(autouse=True)
def sys_executable_encodable():
    try:
        sys.executable.encode('UTF-8')
    except UnicodeEncodeError:
        pytest.skip("sys.executable is not encodable to UTF-8")


mac_woes = pytest.mark.skipif(
    "not sys.platform.startswith('linux')",
    reason='spurious stdout/stderr output under macOS',
)


@pytest.mark.usefixtures('save_env')
@pytest.mark.usefixtures('save_argv')
@pytest.mark.usefixtures('save_cwd')
class TestBuildRpm(
    support.TempdirManager,
):
    @mac_woes
    @requires_zlib()
    @pytest.mark.skipif("not shutil.which('rpm')")
    @pytest.mark.skipif("not shutil.which('rpmbuild')")
    def test_quiet(self):
        # let's create a package
        tmp_dir = self.mkdtemp()
        os.environ['HOME'] = tmp_dir  # to confine dir '.rpmdb' creation
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({
            'name': 'foo',
            'version': '0.1',
            'py_modules': ['foo'],
            'url': 'xxx',
            'author': 'xxx',
            'author_email': 'xxx',
        })
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)

        sys.argv = ['setup.py']
        cmd = bdist_rpm(dist)
        cmd.fix_python = True

        # running in quiet mode
        cmd.quiet = True
        cmd.ensure_finalized()
        cmd.run()

        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        assert 'foo-0.1-1.noarch.rpm' in dist_created

        # bug #2945: upload ignores bdist_rpm files
        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files

    @mac_woes
    @requires_zlib()
    # https://bugs.python.org/issue1533164
    @pytest.mark.skipif("not shutil.which('rpm')")
    @pytest.mark.skipif("not shutil.which('rpmbuild')")
    def test_no_optimize_flag(self):
        # let's create a package that breaks bdist_rpm
        tmp_dir = self.mkdtemp()
        os.environ['HOME'] = tmp_dir  # to confine dir '.rpmdb' creation
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({
            'name': 'foo',
            'version': '0.1',
            'py_modules': ['foo'],
            'url': 'xxx',
            'author': 'xxx',
            'author_email': 'xxx',
        })
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)

        sys.argv = ['setup.py']
        cmd = bdist_rpm(dist)
        cmd.fix_python = True

        cmd.quiet = True
        cmd.ensure_finalized()
        cmd.run()

        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        assert 'foo-0.1-1.noarch.rpm' in dist_created

        # bug #2945: upload ignores bdist_rpm files
        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm') in dist.dist_files
        assert ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm') in dist.dist_files

        os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
@@ -0,0 +1,47 @@
"""Tests for distutils.command.build."""

import os
import sys
from distutils.command.build import build
from distutils.tests import support
from sysconfig import get_config_var, get_platform


class TestBuild(support.TempdirManager):
    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = build(dist)
        cmd.finalize_options()

        # if not specified, plat_name gets the current platform
        assert cmd.plat_name == get_platform()

        # build_purelib is build + lib
        wanted = os.path.join(cmd.build_base, 'lib')
        assert cmd.build_purelib == wanted

        # build_platlib is 'build/lib.platform-cache_tag[-pydebug]'
        # examples:
        #   build/lib.macosx-10.3-i386-cpython39
        plat_spec = f'.{cmd.plat_name}-{sys.implementation.cache_tag}'
        if get_config_var('Py_GIL_DISABLED'):
            plat_spec += 't'
        if hasattr(sys, 'gettotalrefcount'):
            assert cmd.build_platlib.endswith('-pydebug')
            plat_spec += '-pydebug'
        wanted = os.path.join(cmd.build_base, 'lib' + plat_spec)
        assert cmd.build_platlib == wanted

        # by default, build_lib = build_purelib
        assert cmd.build_lib == cmd.build_purelib

        # build_temp is build/temp.<plat>
        wanted = os.path.join(cmd.build_base, 'temp' + plat_spec)
        assert cmd.build_temp == wanted

        # build_scripts is build/scripts-x.x
        wanted = os.path.join(cmd.build_base, 'scripts-%d.%d' % sys.version_info[:2])
        assert cmd.build_scripts == wanted

        # executable is os.path.normpath(sys.executable)
        assert cmd.executable == os.path.normpath(sys.executable)
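
# Worked example (assumed interpreter, not part of the patch above): the test's
# plat_spec formula yields a suffix like '.linux-x86_64-cpython-312' on a
# 64-bit Linux CPython 3.12 non-debug build, so the expected directories are
# 'build/lib.linux-x86_64-cpython-312' and 'build/temp.linux-x86_64-cpython-312',
# while build_purelib stays at 'build/lib'.

def _example_plat_spec():  # hypothetical helper mirroring the formula above
    return f'.{get_platform()}-{sys.implementation.cache_tag}'
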
@@ -0,0 +1,134 @@
"""Tests for distutils.command.build_clib."""

import os
from distutils.command.build_clib import build_clib
from distutils.errors import DistutilsSetupError
from distutils.tests import missing_compiler_executable, support

import pytest


class TestBuildCLib(support.TempdirManager):
    def test_check_library_dist(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # 'libraries' option must be a list
        with pytest.raises(DistutilsSetupError):
            cmd.check_library_list('foo')

        # each element of 'libraries' must be a 2-tuple
        with pytest.raises(DistutilsSetupError):
            cmd.check_library_list(['foo1', 'foo2'])

        # first element of each tuple in 'libraries'
        # must be a string (the library name)
        with pytest.raises(DistutilsSetupError):
            cmd.check_library_list([(1, 'foo1'), ('name', 'foo2')])

        # library name may not contain directory separators
        with pytest.raises(DistutilsSetupError):
            cmd.check_library_list(
                [('name', 'foo1'), ('another/name', 'foo2')],
            )

        # second element of each tuple must be a dictionary (build info)
        with pytest.raises(DistutilsSetupError):
            cmd.check_library_list(
                [('name', {}), ('another', 'foo2')],
            )

        # those work
        libs = [('name', {}), ('name', {'ok': 'good'})]
        cmd.check_library_list(libs)

    def test_get_source_files(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        # in the 'libraries' option, 'sources' must be present and must be
        # a list of source filenames
        cmd.libraries = [('name', {})]
        with pytest.raises(DistutilsSetupError):
            cmd.get_source_files()

        cmd.libraries = [('name', {'sources': 1})]
        with pytest.raises(DistutilsSetupError):
            cmd.get_source_files()

        cmd.libraries = [('name', {'sources': ['a', 'b']})]
        assert cmd.get_source_files() == ['a', 'b']

        cmd.libraries = [('name', {'sources': ('a', 'b')})]
        assert cmd.get_source_files() == ['a', 'b']

        cmd.libraries = [
            ('name', {'sources': ('a', 'b')}),
            ('name2', {'sources': ['c', 'd']}),
        ]
        assert cmd.get_source_files() == ['a', 'b', 'c', 'd']

    def test_build_libraries(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        class FakeCompiler:
            def compile(*args, **kw):
                pass

            create_static_lib = compile

        cmd.compiler = FakeCompiler()

        # build_libraries is also doing a bit of typo checking
        lib = [('name', {'sources': 'notvalid'})]
        with pytest.raises(DistutilsSetupError):
            cmd.build_libraries(lib)

        lib = [('name', {'sources': list()})]
        cmd.build_libraries(lib)

        lib = [('name', {'sources': tuple()})]
        cmd.build_libraries(lib)

    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        cmd.include_dirs = 'one-dir'
        cmd.finalize_options()
        assert cmd.include_dirs == ['one-dir']

        cmd.include_dirs = None
        cmd.finalize_options()
        assert cmd.include_dirs == []

        cmd.distribution.libraries = 'WONTWORK'
        with pytest.raises(DistutilsSetupError):
            cmd.finalize_options()

    @pytest.mark.skipif('platform.system() == "Windows"')
    def test_run(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)

        foo_c = os.path.join(pkg_dir, 'foo.c')
        self.write_file(foo_c, 'int main(void) { return 1;}\n')
        cmd.libraries = [('foo', {'sources': [foo_c]})]

        build_temp = os.path.join(pkg_dir, 'build')
        os.mkdir(build_temp)
        cmd.build_temp = build_temp
        cmd.build_clib = build_temp

        # Before we run the command, we want to make sure
        # all commands are present on the system.
        ccmd = missing_compiler_executable()
        if ccmd is not None:
            self.skipTest(f'The {ccmd!r} command is not found')

        # this should work
        cmd.run()

        # let's check the result
        assert 'libfoo.a' in os.listdir(build_temp)
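
# Illustrative configuration sketch (hypothetical values, not part of the patch
# above): the 'libraries' option validated by these tests is a list of
# (name, build_info) pairs, where build_info must provide a 'sources' list.

example_libraries = [
    ('foo', {'sources': ['foo.c'], 'include_dirs': ['include']}),  # hypothetical
]
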
@@ -0,0 +1,563 @@
import contextlib
import importlib
import os
import platform
import re
import shutil
import site
import sys
import tempfile
import textwrap
from distutils import sysconfig
from distutils.command.build_ext import build_ext
from distutils.core import Distribution
from distutils.errors import (
    CompileError,
    DistutilsPlatformError,
    DistutilsSetupError,
    UnknownFileError,
)
from distutils.extension import Extension
from distutils.tests import missing_compiler_executable
from distutils.tests.support import (
    TempdirManager,
    copy_xxmodule_c,
    fixup_build_ext,
)
from io import StringIO

import jaraco.path
import path
import pytest
from test import support

from .compat import py38 as import_helper


@pytest.fixture()
def user_site_dir(request):
    self = request.instance
    self.tmp_dir = self.mkdtemp()
    self.tmp_path = path.Path(self.tmp_dir)
    from distutils.command import build_ext

    orig_user_base = site.USER_BASE

    site.USER_BASE = self.mkdtemp()
    build_ext.USER_BASE = site.USER_BASE

    # bpo-30132: On Windows, a .pdb file may be created in the current
    # working directory. Create a temporary working directory to cleanup
    # everything at the end of the test.
    with self.tmp_path:
        yield

    site.USER_BASE = orig_user_base
    build_ext.USER_BASE = orig_user_base


@contextlib.contextmanager
def safe_extension_import(name, path):
    with import_helper.CleanImport(name):
        with extension_redirect(name, path) as new_path:
            with import_helper.DirsOnSysPath(new_path):
                yield


@contextlib.contextmanager
def extension_redirect(mod, path):
    """
    Tests will fail to tear down an extension module if it's been imported.

    Before importing, copy the file to a temporary directory that won't
    be cleaned up. Yield the new path.
    """
    if platform.system() != "Windows" and sys.platform != "cygwin":
        yield path
        return
    with import_helper.DirsOnSysPath(path):
        spec = importlib.util.find_spec(mod)
        filename = os.path.basename(spec.origin)
        trash_dir = tempfile.mkdtemp(prefix='deleteme')
        dest = os.path.join(trash_dir, os.path.basename(filename))
        shutil.copy(spec.origin, dest)
        yield trash_dir
        # TODO: can the file be scheduled for deletion?


@pytest.mark.usefixtures('user_site_dir')
class TestBuildExt(TempdirManager):
    def build_ext(self, *args, **kwargs):
        return build_ext(*args, **kwargs)

    def test_build_ext(self):
        missing_compiler_executable()
        copy_xxmodule_c(self.tmp_dir)
        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
        xx_ext = Extension('xx', [xx_c])
        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
        dist.package_dir = self.tmp_dir
        cmd = self.build_ext(dist)
        fixup_build_ext(cmd)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        old_stdout = sys.stdout
        if not support.verbose:
            # silence compiler output
            sys.stdout = StringIO()
        try:
            cmd.ensure_finalized()
            cmd.run()
        finally:
            sys.stdout = old_stdout

        with safe_extension_import('xx', self.tmp_dir):
            self._test_xx()

    @staticmethod
    def _test_xx():
        import xx

        for attr in ('error', 'foo', 'new', 'roj'):
            assert hasattr(xx, attr)

        assert xx.foo(2, 5) == 7
        assert xx.foo(13, 15) == 28
        assert xx.new().demo() is None
        if support.HAVE_DOCSTRINGS:
            doc = 'This is a template module just for instruction.'
            assert xx.__doc__ == doc
        assert isinstance(xx.Null(), xx.Null)
        assert isinstance(xx.Str(), xx.Str)

    def test_solaris_enable_shared(self):
        dist = Distribution({'name': 'xx'})
        cmd = self.build_ext(dist)
        old = sys.platform

        sys.platform = 'sunos'  # fooling finalize_options
        from distutils.sysconfig import _config_vars

        old_var = _config_vars.get('Py_ENABLE_SHARED')
        _config_vars['Py_ENABLE_SHARED'] = True
        try:
            cmd.ensure_finalized()
        finally:
            sys.platform = old
            if old_var is None:
                del _config_vars['Py_ENABLE_SHARED']
            else:
                _config_vars['Py_ENABLE_SHARED'] = old_var

        # make sure we get some library dirs under solaris
        assert len(cmd.library_dirs) > 0

    def test_user_site(self):
        import site

        dist = Distribution({'name': 'xx'})
        cmd = self.build_ext(dist)

        # making sure the user option is there
        options = [name for name, short, label in cmd.user_options]
        assert 'user' in options

        # setting a value
        cmd.user = True

        # setting user based lib and include
        lib = os.path.join(site.USER_BASE, 'lib')
        incl = os.path.join(site.USER_BASE, 'include')
        os.mkdir(lib)
        os.mkdir(incl)

        # let's run finalize
        cmd.ensure_finalized()

        # see if include_dirs and library_dirs
        # were set
        assert lib in cmd.library_dirs
        assert lib in cmd.rpath
        assert incl in cmd.include_dirs

    def test_optional_extension(self):
        # this extension will fail, but let's ignore this failure
        # with the optional argument.
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.ensure_finalized()
        with pytest.raises((UnknownFileError, CompileError)):
            cmd.run()  # should raise an error

        modules = [Extension('foo', ['xxx'], optional=True)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.ensure_finalized()
        cmd.run()  # should pass

    def test_finalize_options(self):
        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.finalize_options()

        py_include = sysconfig.get_python_inc()
        for p in py_include.split(os.path.pathsep):
            assert p in cmd.include_dirs

        plat_py_include = sysconfig.get_python_inc(plat_specific=True)
        for p in plat_py_include.split(os.path.pathsep):
            assert p in cmd.include_dirs

        # make sure cmd.libraries is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.libraries = 'my_lib, other_lib lastlib'
        cmd.finalize_options()
        assert cmd.libraries == ['my_lib', 'other_lib', 'lastlib']

        # make sure cmd.library_dirs is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.library_dirs = f'my_lib_dir{os.pathsep}other_lib_dir'
        cmd.finalize_options()
        assert 'my_lib_dir' in cmd.library_dirs
        assert 'other_lib_dir' in cmd.library_dirs

        # make sure rpath is turned into a list
        # if it's a string
        cmd = self.build_ext(dist)
        cmd.rpath = f'one{os.pathsep}two'
        cmd.finalize_options()
        assert cmd.rpath == ['one', 'two']

        # make sure cmd.link_objects is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.link_objects = 'one two,three'
        cmd.finalize_options()
        assert cmd.link_objects == ['one', 'two', 'three']

        # XXX more tests to perform for win32

        # make sure define is turned into a list of 2-tuples
        # if it is a ','-separated string
        cmd = self.build_ext(dist)
        cmd.define = 'one,two'
        cmd.finalize_options()
        assert cmd.define == [('one', '1'), ('two', '1')]

        # make sure undef is turned into a list of
        # strings if they are ','-separated strings
        cmd = self.build_ext(dist)
        cmd.undef = 'one,two'
        cmd.finalize_options()
        assert cmd.undef == ['one', 'two']

        # make sure swig_opts is turned into a list
        cmd = self.build_ext(dist)
        cmd.swig_opts = None
        cmd.finalize_options()
        assert cmd.swig_opts == []

        cmd = self.build_ext(dist)
        cmd.swig_opts = '1 2'
        cmd.finalize_options()
        assert cmd.swig_opts == ['1', '2']

    def test_check_extensions_list(self):
        dist = Distribution()
        cmd = self.build_ext(dist)
        cmd.finalize_options()

        # 'extensions' option must be a list of Extension instances
        with pytest.raises(DistutilsSetupError):
            cmd.check_extensions_list('foo')

        # each element of 'ext_modules' option must be an
        # Extension instance or 2-tuple
        exts = [('bar', 'foo', 'bar'), 'foo']
        with pytest.raises(DistutilsSetupError):
            cmd.check_extensions_list(exts)

        # first element of each tuple in 'ext_modules'
        # must be the extension name (a string) and match
        # a python dotted-separated name
        exts = [('foo-bar', '')]
        with pytest.raises(DistutilsSetupError):
            cmd.check_extensions_list(exts)

        # second element of each tuple in 'ext_modules'
        # must be a dictionary (build info)
        exts = [('foo.bar', '')]
        with pytest.raises(DistutilsSetupError):
            cmd.check_extensions_list(exts)

        # ok this one should pass
        exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', 'some': 'bar'})]
        cmd.check_extensions_list(exts)
        ext = exts[0]
        assert isinstance(ext, Extension)

        # check_extensions_list adds in ext the values passed
        # when they are in ('include_dirs', 'library_dirs', 'libraries'
        # 'extra_objects', 'extra_compile_args', 'extra_link_args')
        assert ext.libraries == 'foo'
        assert not hasattr(ext, 'some')

        # 'macros' element of build info dict must be 1- or 2-tuple
        exts = [
            (
                'foo.bar',
                {
                    'sources': [''],
                    'libraries': 'foo',
                    'some': 'bar',
                    'macros': [('1', '2', '3'), 'foo'],
                },
            )
        ]
        with pytest.raises(DistutilsSetupError):
            cmd.check_extensions_list(exts)

        exts[0][1]['macros'] = [('1', '2'), ('3',)]
        cmd.check_extensions_list(exts)
        assert exts[0].undef_macros == ['3']
        assert exts[0].define_macros == [('1', '2')]

    def test_get_source_files(self):
        modules = [Extension('foo', ['xxx'], optional=False)]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.ensure_finalized()
        assert cmd.get_source_files() == ['xxx']

    def test_unicode_module_names(self):
        modules = [
            Extension('foo', ['aaa'], optional=False),
            Extension('föö', ['uuu'], optional=False),
        ]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = self.build_ext(dist)
        cmd.ensure_finalized()
        assert re.search(r'foo(_d)?\..*', cmd.get_ext_filename(modules[0].name))
        assert re.search(r'föö(_d)?\..*', cmd.get_ext_filename(modules[1].name))
        assert cmd.get_export_symbols(modules[0]) == ['PyInit_foo']
        assert cmd.get_export_symbols(modules[1]) == ['PyInitU_f_1gaa']

    def test_compiler_option(self):
        # cmd.compiler is an option and
        # should not be overridden by a compiler instance
        # when the command is run
        dist = Distribution()
        cmd = self.build_ext(dist)
        cmd.compiler = 'unix'
        cmd.ensure_finalized()
        cmd.run()
        assert cmd.compiler == 'unix'

    def test_get_outputs(self):
        missing_compiler_executable()
        tmp_dir = self.mkdtemp()
        c_file = os.path.join(tmp_dir, 'foo.c')
        self.write_file(c_file, 'void PyInit_foo(void) {}\n')
        ext = Extension('foo', [c_file], optional=False)
        dist = Distribution({'name': 'xx', 'ext_modules': [ext]})
        cmd = self.build_ext(dist)
        fixup_build_ext(cmd)
        cmd.ensure_finalized()
        assert len(cmd.get_outputs()) == 1

        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')

        # issue #5977 : distutils build_ext.get_outputs
        # returns wrong result with --inplace
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            cmd.inplace = True
            cmd.run()
            so_file = cmd.get_outputs()[0]
        finally:
            os.chdir(old_wd)
        assert os.path.exists(so_file)
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        assert so_file.endswith(ext_suffix)
        so_dir = os.path.dirname(so_file)
        assert so_dir == other_tmp_dir

        cmd.inplace = False
        cmd.compiler = None
        cmd.run()
        so_file = cmd.get_outputs()[0]
        assert os.path.exists(so_file)
        assert so_file.endswith(ext_suffix)
        so_dir = os.path.dirname(so_file)
        assert so_dir == cmd.build_lib

        # inplace = False, cmd.package = 'bar'
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = {'': 'bar'}
        path = cmd.get_ext_fullpath('foo')
        # checking that the last directory is the build_dir
        path = os.path.split(path)[0]
        assert path == cmd.build_lib

        # inplace = True, cmd.package = 'bar'
        cmd.inplace = True
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            path = cmd.get_ext_fullpath('foo')
        finally:
            os.chdir(old_wd)
        # checking that the last directory is bar
        path = os.path.split(path)[0]
        lastdir = os.path.split(path)[-1]
        assert lastdir == 'bar'

    def test_ext_fullpath(self):
        ext = sysconfig.get_config_var('EXT_SUFFIX')
        # building lxml.etree inplace
        # etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
        # etree_ext = Extension('lxml.etree', [etree_c])
        # dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
        dist = Distribution()
        cmd = self.build_ext(dist)
        cmd.inplace = True
        cmd.distribution.package_dir = {'': 'src'}
        cmd.distribution.packages = ['lxml', 'lxml.html']
        curdir = os.getcwd()
        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        assert wanted == path

        # building lxml.etree not inplace
        cmd.inplace = False
        cmd.build_lib = os.path.join(curdir, 'tmpdir')
        wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        assert wanted == path

        # building twisted.runner.portmap not inplace
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = {}
        cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', 'portmap' + ext)
        assert wanted == path

        # building twisted.runner.portmap inplace
        cmd.inplace = True
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
        assert wanted == path

    @pytest.mark.skipif('platform.system() != "Darwin"')
    @pytest.mark.usefixtures('save_env')
    def test_deployment_target_default(self):
        # Issue 9516: Test that, in the absence of the environment variable,
        # an extension module is compiled with the same deployment target as
        # the interpreter.
        self._try_compile_deployment_target('==', None)

    @pytest.mark.skipif('platform.system() != "Darwin"')
    @pytest.mark.usefixtures('save_env')
    def test_deployment_target_too_low(self):
        # Issue 9516: Test that an extension module is not allowed to be
        # compiled with a deployment target less than that of the interpreter.
        with pytest.raises(DistutilsPlatformError):
            self._try_compile_deployment_target('>', '10.1')

    @pytest.mark.skipif('platform.system() != "Darwin"')
    @pytest.mark.usefixtures('save_env')
    def test_deployment_target_higher_ok(self):  # pragma: no cover
        # Issue 9516: Test that an extension module can be compiled with a
        # deployment target higher than that of the interpreter: the ext
        # module may depend on some newer OS feature.
        deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        if deptarget:
            # increment the minor version number (i.e. 10.6 -> 10.7)
            deptarget = [int(x) for x in deptarget.split('.')]
            deptarget[-1] += 1
            deptarget = '.'.join(str(i) for i in deptarget)
            self._try_compile_deployment_target('<', deptarget)

    def _try_compile_deployment_target(self, operator, target):  # pragma: no cover
        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

        jaraco.path.build(
            {
                'deptargetmodule.c': textwrap.dedent(f"""\
                    #include <AvailabilityMacros.h>

                    int dummy;

                    #if TARGET {operator} MAC_OS_X_VERSION_MIN_REQUIRED
                    #else
                    #error "Unexpected target"
                    #endif

                    """),
            },
            self.tmp_path,
        )

        # get the deployment target that the interpreter was built with
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')[0:2]))
        # format the target value as defined in the Apple
        # Availability Macros. We can't use the macro names since
        # at least one value we test with will not exist yet.
        if target[:2] < (10, 10):
            # for 10.1 through 10.9.x -> "10n0"
            target = '%02d%01d0' % target
        else:
            # for 10.10 and beyond -> "10nn00"
            if len(target) >= 2:
                target = '%02d%02d00' % target
            else:
                # 11 and later can have no minor version (11 instead of 11.0)
                target = '%02d0000' % target
        deptarget_ext = Extension(
            'deptarget',
            [self.tmp_path / 'deptargetmodule.c'],
            extra_compile_args=[f'-DTARGET={target}'],
        )
        dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]})
        dist.package_dir = self.tmp_dir
        cmd = self.build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        try:
            old_stdout = sys.stdout
            if not support.verbose:
                # silence compiler output
                sys.stdout = StringIO()
            try:
                cmd.ensure_finalized()
                cmd.run()
            finally:
                sys.stdout = old_stdout

        except CompileError:
            self.fail("Wrong deployment target during compilation")


class TestParallelBuildExt(TestBuildExt):
    def build_ext(self, *args, **kwargs):
        build_ext = super().build_ext(*args, **kwargs)
        build_ext.parallel = True
        return build_ext
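
# Worked example (not part of the patch above) of the Availability Macros
# encoding used by _try_compile_deployment_target: '10.9' -> '1090',
# '10.15' -> '101500', '12.0' -> '120000', and a bare '11' -> '110000'.

def _example_encode_deployment_target(version='10.15'):  # hypothetical helper
    parts = tuple(map(int, version.split('.')[0:2]))
    if parts < (10, 10):
        return '%02d%01d0' % parts  # 10.1 through 10.9.x
    if len(parts) >= 2:
        return '%02d%02d00' % parts  # 10.10 and beyond
    return '%02d0000' % parts  # bare major version, e.g. '11'
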
@@ -0,0 +1,196 @@
|
||||
"""Tests for distutils.command.build_py."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from distutils.command.build_py import build_py
|
||||
from distutils.core import Distribution
|
||||
from distutils.errors import DistutilsFileError
|
||||
from distutils.tests import support
|
||||
|
||||
import jaraco.path
|
||||
import pytest
|
||||
|
||||
|
||||
@support.combine_markers
|
||||
class TestBuildPy(support.TempdirManager):
|
||||
def test_package_data(self):
|
||||
sources = self.mkdtemp()
|
||||
jaraco.path.build(
|
||||
{
|
||||
'__init__.py': "# Pretend this is a package.",
|
||||
'README.txt': 'Info about this package',
|
||||
},
|
||||
sources,
|
||||
)
|
||||
|
||||
destination = self.mkdtemp()
|
||||
|
||||
dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}})
|
||||
# script_name need not exist, it just need to be initialized
|
||||
dist.script_name = os.path.join(sources, "setup.py")
|
||||
dist.command_obj["build"] = support.DummyCommand(
|
||||
force=False, build_lib=destination
|
||||
)
|
||||
dist.packages = ["pkg"]
|
||||
dist.package_data = {"pkg": ["README.txt"]}
|
||||
dist.package_dir = {"pkg": sources}
|
||||
|
||||
cmd = build_py(dist)
|
||||
cmd.compile = True
|
||||
cmd.ensure_finalized()
|
||||
assert cmd.package_data == dist.package_data
|
||||
|
||||
cmd.run()
|
||||
|
||||
# This makes sure the list of outputs includes byte-compiled
|
||||
# files for Python modules but not for package data files
|
||||
# (there shouldn't *be* byte-code files for those!).
|
||||
assert len(cmd.get_outputs()) == 3
|
||||
pkgdest = os.path.join(destination, "pkg")
|
||||
files = os.listdir(pkgdest)
|
||||
pycache_dir = os.path.join(pkgdest, "__pycache__")
|
||||
assert "__init__.py" in files
|
||||
assert "README.txt" in files
|
||||
if sys.dont_write_bytecode:
|
||||
assert not os.path.exists(pycache_dir)
|
||||
else:
|
||||
pyc_files = os.listdir(pycache_dir)
|
||||
assert f"__init__.{sys.implementation.cache_tag}.pyc" in pyc_files
|
||||
|
||||
def test_empty_package_dir(self):
|
||||
# See bugs #1668596/#1720897
|
||||
sources = self.mkdtemp()
|
||||
jaraco.path.build({'__init__.py': '', 'doc': {'testfile': ''}}, sources)
|
||||
|
||||
os.chdir(sources)
|
||||
dist = Distribution({
|
||||
"packages": ["pkg"],
|
||||
"package_dir": {"pkg": ""},
|
||||
"package_data": {"pkg": ["doc/*"]},
|
||||
})
|
||||
# script_name need not exist, it just need to be initialized
|
||||
dist.script_name = os.path.join(sources, "setup.py")
|
||||
dist.script_args = ["build"]
|
||||
dist.parse_command_line()
|
||||
|
||||
try:
|
||||
dist.run_commands()
|
||||
except DistutilsFileError:
|
||||
self.fail("failed package_data test when package_dir is ''")
|
||||
|
||||
@pytest.mark.skipif('sys.dont_write_bytecode')
|
||||
def test_byte_compile(self):
|
||||
project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
|
||||
os.chdir(project_dir)
|
||||
self.write_file('boiledeggs.py', 'import antigravity')
|
||||
cmd = build_py(dist)
|
||||
cmd.compile = True
|
||||
cmd.build_lib = 'here'
|
||||
cmd.finalize_options()
|
||||
cmd.run()
|
||||
|
||||
found = os.listdir(cmd.build_lib)
|
||||
assert sorted(found) == ['__pycache__', 'boiledeggs.py']
|
||||
found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
|
||||
assert found == [f'boiledeggs.{sys.implementation.cache_tag}.pyc']
|
||||
|
||||
@pytest.mark.skipif('sys.dont_write_bytecode')
|
||||
def test_byte_compile_optimized(self):
|
||||
project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
|
||||
os.chdir(project_dir)
|
||||
self.write_file('boiledeggs.py', 'import antigravity')
|
||||
cmd = build_py(dist)
|
||||
cmd.compile = False
|
||||
cmd.optimize = 1
|
||||
cmd.build_lib = 'here'
|
||||
cmd.finalize_options()
|
||||
cmd.run()
|
||||
|
||||
found = os.listdir(cmd.build_lib)
|
||||
assert sorted(found) == ['__pycache__', 'boiledeggs.py']
|
||||
found = os.listdir(os.path.join(cmd.build_lib, '__pycache__'))
|
||||
expect = f'boiledeggs.{sys.implementation.cache_tag}.opt-1.pyc'
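# (optimize=1 requests PEP 488 ".opt-1"-tagged bytecode instead of plain .pyc files)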
|
||||
assert sorted(found) == [expect]
|
||||
|
||||
def test_dir_in_package_data(self):
|
||||
"""
|
||||
A directory in package_data should not be added to the filelist.
|
||||
"""
|
||||
# See bug 19286
|
||||
sources = self.mkdtemp()
|
||||
jaraco.path.build(
|
||||
{
|
||||
'pkg': {
|
||||
'__init__.py': '',
|
||||
'doc': {
|
||||
'testfile': '',
|
||||
# create a directory that could be incorrectly detected as a file
|
||||
'otherdir': {},
|
||||
},
|
||||
}
|
||||
},
|
||||
sources,
|
||||
)
|
||||
|
||||
os.chdir(sources)
|
||||
dist = Distribution({"packages": ["pkg"], "package_data": {"pkg": ["doc/*"]}})
|
||||
# script_name need not exist, it just needs to be initialized
|
||||
dist.script_name = os.path.join(sources, "setup.py")
|
||||
dist.script_args = ["build"]
|
||||
dist.parse_command_line()
|
||||
|
||||
try:
|
||||
dist.run_commands()
|
||||
except DistutilsFileError:
|
||||
self.fail("failed package_data when data dir includes a dir")
|
||||
|
||||
def test_dont_write_bytecode(self, caplog):
|
||||
# makes sure byte_compile is not used
|
||||
dist = self.create_dist()[1]
|
||||
cmd = build_py(dist)
|
||||
cmd.compile = True
|
||||
cmd.optimize = 1
|
||||
|
||||
old_dont_write_bytecode = sys.dont_write_bytecode
|
||||
sys.dont_write_bytecode = True
|
||||
try:
|
||||
cmd.byte_compile([])
|
||||
finally:
|
||||
sys.dont_write_bytecode = old_dont_write_bytecode
|
||||
|
||||
assert 'byte-compiling is disabled' in caplog.records[0].message
|
||||
|
||||
def test_namespace_package_does_not_warn(self, caplog):
|
||||
"""
|
||||
Originally, the distutils implementation did not account for PEP 420
|
||||
and emitted warnings for package directories that did not contain
|
||||
``__init__.py`` files.
|
||||
After the acceptance of PEP 420, these warnings no longer make sense,
|
||||
so we want to ensure they are not displayed, to avoid confusing users.
|
||||
"""
|
||||
# Create a fake project structure with a package namespace:
|
||||
tmp = self.mkdtemp()
|
||||
jaraco.path.build({'ns': {'pkg': {'module.py': ''}}}, tmp)
|
||||
os.chdir(tmp)
|
||||
|
||||
# Configure the package:
|
||||
attrs = {
|
||||
"name": "ns.pkg",
|
||||
"packages": ["ns", "ns.pkg"],
|
||||
"script_name": "setup.py",
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
|
||||
# Run code paths that would trigger the trap:
|
||||
cmd = dist.get_command_obj("build_py")
|
||||
cmd.finalize_options()
|
||||
modules = cmd.find_all_modules()
|
||||
assert len(modules) == 1
|
||||
module_path = modules[0][-1]
|
||||
assert module_path.replace(os.sep, "/") == "ns/pkg/module.py"
|
||||
|
||||
cmd.run()
|
||||
|
||||
assert not any(
|
||||
"package init file" in msg and "not found" in msg for msg in caplog.messages
|
||||
)
|
||||
@@ -0,0 +1,96 @@
|
||||
"""Tests for distutils.command.build_scripts."""
|
||||
|
||||
import os
|
||||
import textwrap
|
||||
from distutils import sysconfig
|
||||
from distutils.command.build_scripts import build_scripts
|
||||
from distutils.core import Distribution
|
||||
from distutils.tests import support
|
||||
|
||||
import jaraco.path
|
||||
|
||||
|
||||
class TestBuildScripts(support.TempdirManager):
|
||||
def test_default_settings(self):
|
||||
cmd = self.get_build_scripts_cmd("/foo/bar", [])
|
||||
assert not cmd.force
|
||||
assert cmd.build_dir is None
|
||||
|
||||
cmd.finalize_options()
|
||||
|
||||
assert cmd.force
|
||||
assert cmd.build_dir == "/foo/bar"
|
||||
|
||||
def test_build(self):
|
||||
source = self.mkdtemp()
|
||||
target = self.mkdtemp()
|
||||
expected = self.write_sample_scripts(source)
|
||||
|
||||
cmd = self.get_build_scripts_cmd(
|
||||
target, [os.path.join(source, fn) for fn in expected]
|
||||
)
|
||||
cmd.finalize_options()
|
||||
cmd.run()
|
||||
|
||||
built = os.listdir(target)
|
||||
for name in expected:
|
||||
assert name in built
|
||||
|
||||
def get_build_scripts_cmd(self, target, scripts):
|
||||
import sys
|
||||
|
||||
dist = Distribution()
|
||||
dist.scripts = scripts
|
||||
dist.command_obj["build"] = support.DummyCommand(
|
||||
build_scripts=target, force=True, executable=sys.executable
|
||||
)
|
||||
return build_scripts(dist)
|
||||
|
||||
@staticmethod
|
||||
def write_sample_scripts(dir):
|
||||
spec = {
|
||||
'script1.py': textwrap.dedent("""
|
||||
#! /usr/bin/env python2.3
|
||||
# bogus script w/ Python sh-bang
|
||||
pass
|
||||
""").lstrip(),
|
||||
'script2.py': textwrap.dedent("""
|
||||
#!/usr/bin/python
|
||||
# bogus script w/ Python sh-bang
|
||||
pass
|
||||
""").lstrip(),
|
||||
'shell.sh': textwrap.dedent("""
|
||||
#!/bin/sh
|
||||
# bogus shell script w/ sh-bang
|
||||
exit 0
|
||||
""").lstrip(),
|
||||
}
|
||||
jaraco.path.build(spec, dir)
|
||||
return list(spec)
|
||||
|
||||
def test_version_int(self):
|
||||
source = self.mkdtemp()
|
||||
target = self.mkdtemp()
|
||||
expected = self.write_sample_scripts(source)
|
||||
|
||||
cmd = self.get_build_scripts_cmd(
|
||||
target, [os.path.join(source, fn) for fn in expected]
|
||||
)
|
||||
cmd.finalize_options()
|
||||
|
||||
# https://bugs.python.org/issue4524
|
||||
#
|
||||
# On linux-g++-32 with command line `./configure --enable-ipv6
|
||||
# --with-suffix=3`, python is compiled okay but the build scripts
|
||||
# failed when writing the name of the executable
|
||||
old = sysconfig.get_config_vars().get('VERSION')
|
||||
sysconfig._config_vars['VERSION'] = 4
|
||||
try:
|
||||
cmd.run()
|
||||
finally:
|
||||
if old is not None:
|
||||
sysconfig._config_vars['VERSION'] = old
|
||||
|
||||
built = os.listdir(target)
|
||||
for name in expected:
|
||||
assert name in built
|
||||
@@ -0,0 +1,91 @@
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import sysconfig
|
||||
import textwrap
|
||||
from distutils import ccompiler
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _make_strs(paths):
|
||||
"""
|
||||
Convert paths to strings for legacy compatibility.
|
||||
"""
|
||||
if sys.version_info > (3, 8) and platform.system() != "Windows":
|
||||
return paths
|
||||
return list(map(os.fspath, paths))
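# Note: the conversion to str mirrors the condition above -- Path objects were
# not reliably accepted by the compiler interfaces on Windows or on Python 3.8
# and earlier.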
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def c_file(tmp_path):
|
||||
c_file = tmp_path / 'foo.c'
|
||||
gen_headers = ('Python.h',)
|
||||
is_windows = platform.system() == "Windows"
|
||||
plat_headers = ('windows.h',) * is_windows
|
||||
all_headers = gen_headers + plat_headers
|
||||
headers = '\n'.join(f'#include <{header}>\n' for header in all_headers)
|
||||
payload = (
|
||||
textwrap.dedent(
|
||||
"""
|
||||
#headers
|
||||
void PyInit_foo(void) {}
|
||||
"""
|
||||
)
|
||||
.lstrip()
|
||||
.replace('#headers', headers)
|
||||
)
|
||||
c_file.write_text(payload, encoding='utf-8')
|
||||
return c_file
|
||||
|
||||
|
||||
def test_set_include_dirs(c_file):
|
||||
"""
|
||||
Extensions should build even if set_include_dirs is invoked.
|
||||
In particular, compiler-specific paths should not be overridden.
|
||||
"""
|
||||
compiler = ccompiler.new_compiler()
|
||||
python = sysconfig.get_paths()['include']
|
||||
compiler.set_include_dirs([python])
|
||||
compiler.compile(_make_strs([c_file]))
|
||||
|
||||
# do it again, setting include dirs after any initialization
|
||||
compiler.set_include_dirs([python])
|
||||
compiler.compile(_make_strs([c_file]))
|
||||
|
||||
|
||||
def test_has_function_prototype():
|
||||
# Issue https://github.com/pypa/setuptools/issues/3648
|
||||
# Test prototype-generating behavior.
|
||||
|
||||
compiler = ccompiler.new_compiler()
|
||||
|
||||
# Every C implementation should have these.
|
||||
assert compiler.has_function('abort')
|
||||
assert compiler.has_function('exit')
|
||||
with pytest.deprecated_call(match='includes is deprecated'):
|
||||
# abort() is a valid expression with the <stdlib.h> prototype.
|
||||
assert compiler.has_function('abort', includes=['stdlib.h'])
|
||||
with pytest.deprecated_call(match='includes is deprecated'):
|
||||
# But exit() is not valid with the actual prototype in scope.
|
||||
assert not compiler.has_function('exit', includes=['stdlib.h'])
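# (with the real <stdlib.h> prototype in scope, the argument-less probe call
# that has_function generates for exit() no longer compiles)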
|
||||
# And setuptools_does_not_exist is not declared or defined at all.
|
||||
assert not compiler.has_function('setuptools_does_not_exist')
|
||||
with pytest.deprecated_call(match='includes is deprecated'):
|
||||
assert not compiler.has_function(
|
||||
'setuptools_does_not_exist', includes=['stdio.h']
|
||||
)
|
||||
|
||||
|
||||
def test_include_dirs_after_multiple_compile_calls(c_file):
|
||||
"""
|
||||
Calling compile multiple times should not change the include dirs
|
||||
(regression test for setuptools issue #3591).
|
||||
"""
|
||||
compiler = ccompiler.new_compiler()
|
||||
python = sysconfig.get_paths()['include']
|
||||
compiler.set_include_dirs([python])
|
||||
compiler.compile(_make_strs([c_file]))
|
||||
assert compiler.include_dirs == [python]
|
||||
compiler.compile(_make_strs([c_file]))
|
||||
assert compiler.include_dirs == [python]
|
||||
@@ -0,0 +1,194 @@
|
||||
"""Tests for distutils.command.check."""
|
||||
|
||||
import os
|
||||
import textwrap
|
||||
from distutils.command.check import check
|
||||
from distutils.errors import DistutilsSetupError
|
||||
from distutils.tests import support
|
||||
|
||||
import pytest
|
||||
|
||||
try:
|
||||
import pygments
|
||||
except ImportError:
|
||||
pygments = None
|
||||
|
||||
|
||||
HERE = os.path.dirname(__file__)
|
||||
|
||||
|
||||
@support.combine_markers
|
||||
class TestCheck(support.TempdirManager):
|
||||
def _run(self, metadata=None, cwd=None, **options):
|
||||
if metadata is None:
|
||||
metadata = {}
|
||||
if cwd is not None:
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(cwd)
|
||||
pkg_info, dist = self.create_dist(**metadata)
|
||||
cmd = check(dist)
|
||||
cmd.initialize_options()
|
||||
for name, value in options.items():
|
||||
setattr(cmd, name, value)
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
if cwd is not None:
|
||||
os.chdir(old_dir)
|
||||
return cmd
|
||||
|
||||
def test_check_metadata(self):
|
||||
# let's run the command with no metadata at all
|
||||
# by default, check is checking the metadata
|
||||
# should have some warnings
|
||||
cmd = self._run()
|
||||
assert cmd._warnings == 1
|
||||
|
||||
# now let's add the required fields
|
||||
# and run it again, to make sure we don't get
|
||||
# any warning anymore
|
||||
metadata = {
|
||||
'url': 'xxx',
|
||||
'author': 'xxx',
|
||||
'author_email': 'xxx',
|
||||
'name': 'xxx',
|
||||
'version': 'xxx',
|
||||
}
|
||||
cmd = self._run(metadata)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
# now with the strict mode, we should
|
||||
# get an error if any metadata is missing
|
||||
with pytest.raises(DistutilsSetupError):
|
||||
self._run({}, **{'strict': 1})
|
||||
|
||||
# and of course, no error when all metadata are present
|
||||
cmd = self._run(metadata, strict=True)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
# now a test with non-ASCII characters
|
||||
metadata = {
|
||||
'url': 'xxx',
|
||||
'author': '\u00c9ric',
|
||||
'author_email': 'xxx',
|
||||
'name': 'xxx',
|
||||
'version': 'xxx',
|
||||
'description': 'Something about esszet \u00df',
|
||||
'long_description': 'More things about esszet \u00df',
|
||||
}
|
||||
cmd = self._run(metadata)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
def test_check_author_maintainer(self):
|
||||
for kind in ("author", "maintainer"):
|
||||
# ensure no warning when author_email or maintainer_email is given
|
||||
# (the spec allows these fields to take the form "Name <email>")
|
||||
metadata = {
|
||||
'url': 'xxx',
|
||||
kind + '_email': 'Name <name@email.com>',
|
||||
'name': 'xxx',
|
||||
'version': 'xxx',
|
||||
}
|
||||
cmd = self._run(metadata)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
# the check should not warn if only email is given
|
||||
metadata[kind + '_email'] = 'name@email.com'
|
||||
cmd = self._run(metadata)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
# the check should not warn if only the name is given
|
||||
metadata[kind] = "Name"
|
||||
del metadata[kind + '_email']
|
||||
cmd = self._run(metadata)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
def test_check_document(self):
|
||||
pytest.importorskip('docutils')
|
||||
pkg_info, dist = self.create_dist()
|
||||
cmd = check(dist)
|
||||
|
||||
# let's see if it detects broken rest
|
||||
broken_rest = 'title\n===\n\ntest'
|
||||
msgs = cmd._check_rst_data(broken_rest)
|
||||
assert len(msgs) == 1
|
||||
|
||||
# and non-broken rest
|
||||
rest = 'title\n=====\n\ntest'
|
||||
msgs = cmd._check_rst_data(rest)
|
||||
assert len(msgs) == 0
|
||||
|
||||
def test_check_restructuredtext(self):
|
||||
pytest.importorskip('docutils')
|
||||
# let's see if it detects broken rest in long_description
|
||||
broken_rest = 'title\n===\n\ntest'
|
||||
pkg_info, dist = self.create_dist(long_description=broken_rest)
|
||||
cmd = check(dist)
|
||||
cmd.check_restructuredtext()
|
||||
assert cmd._warnings == 1
|
||||
|
||||
# let's see if we have an error with strict=True
|
||||
metadata = {
|
||||
'url': 'xxx',
|
||||
'author': 'xxx',
|
||||
'author_email': 'xxx',
|
||||
'name': 'xxx',
|
||||
'version': 'xxx',
|
||||
'long_description': broken_rest,
|
||||
}
|
||||
with pytest.raises(DistutilsSetupError):
|
||||
self._run(metadata, **{'strict': 1, 'restructuredtext': 1})
|
||||
|
||||
# and non-broken rest, including a non-ASCII character to test #12114
|
||||
metadata['long_description'] = 'title\n=====\n\ntest \u00df'
|
||||
cmd = self._run(metadata, strict=True, restructuredtext=True)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
# check that includes work to test #31292
|
||||
metadata['long_description'] = 'title\n=====\n\n.. include:: includetest.rst'
|
||||
cmd = self._run(metadata, cwd=HERE, strict=True, restructuredtext=True)
|
||||
assert cmd._warnings == 0
|
||||
|
||||
def test_check_restructuredtext_with_syntax_highlight(self):
|
||||
pytest.importorskip('docutils')
|
||||
# Don't fail if there is a `code` or `code-block` directive
|
||||
|
||||
example_rst_docs = [
|
||||
textwrap.dedent(
|
||||
"""\
|
||||
Here's some code:
|
||||
|
||||
.. code:: python
|
||||
|
||||
def foo():
|
||||
pass
|
||||
"""
|
||||
),
|
||||
textwrap.dedent(
|
||||
"""\
|
||||
Here's some code:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def foo():
|
||||
pass
|
||||
"""
|
||||
),
|
||||
]
|
||||
|
||||
for rest_with_code in example_rst_docs:
|
||||
pkg_info, dist = self.create_dist(long_description=rest_with_code)
|
||||
cmd = check(dist)
|
||||
cmd.check_restructuredtext()
|
||||
msgs = cmd._check_rst_data(rest_with_code)
|
||||
if pygments is not None:
|
||||
assert len(msgs) == 0
|
||||
else:
|
||||
assert len(msgs) == 1
|
||||
assert (
|
||||
str(msgs[0][1])
|
||||
== 'Cannot analyze code. Pygments package not found.'
|
||||
)
|
||||
|
||||
def test_check_all(self):
|
||||
with pytest.raises(DistutilsSetupError):
|
||||
self._run({}, **{'strict': 1, 'restructuredtext': 1})
|
||||
@@ -0,0 +1,45 @@
|
||||
"""Tests for distutils.command.clean."""
|
||||
|
||||
import os
|
||||
from distutils.command.clean import clean
|
||||
from distutils.tests import support
|
||||
|
||||
|
||||
class TestClean(support.TempdirManager):
|
||||
def test_simple_run(self):
|
||||
pkg_dir, dist = self.create_dist()
|
||||
cmd = clean(dist)
|
||||
|
||||
# let's add some elements clean should remove
|
||||
dirs = [
|
||||
(d, os.path.join(pkg_dir, d))
|
||||
for d in (
|
||||
'build_temp',
|
||||
'build_lib',
|
||||
'bdist_base',
|
||||
'build_scripts',
|
||||
'build_base',
|
||||
)
|
||||
]
|
||||
|
||||
for name, path in dirs:
|
||||
os.mkdir(path)
|
||||
setattr(cmd, name, path)
|
||||
if name == 'build_base':
|
||||
continue
|
||||
for f in ('one', 'two', 'three'):
|
||||
self.write_file(os.path.join(path, f))
|
||||
|
||||
# let's run the command
|
||||
cmd.all = 1
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# make sure the files were removed
|
||||
for _name, path in dirs:
|
||||
assert not os.path.exists(path), f'{path} was not removed'
|
||||
|
||||
# let's run the command again (should spit warnings but succeed)
|
||||
cmd.all = 1
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
@@ -0,0 +1,107 @@
|
||||
"""Tests for distutils.cmd."""
|
||||
|
||||
import os
|
||||
from distutils import debug
|
||||
from distutils.cmd import Command
|
||||
from distutils.dist import Distribution
|
||||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class MyCmd(Command):
|
||||
def initialize_options(self):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cmd(request):
|
||||
return MyCmd(Distribution())
|
||||
|
||||
|
||||
class TestCommand:
|
||||
def test_ensure_string_list(self, cmd):
|
||||
cmd.not_string_list = ['one', 2, 'three']
|
||||
cmd.yes_string_list = ['one', 'two', 'three']
|
||||
cmd.not_string_list2 = object()
|
||||
cmd.yes_string_list2 = 'ok'
|
||||
cmd.ensure_string_list('yes_string_list')
|
||||
cmd.ensure_string_list('yes_string_list2')
|
||||
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_string_list('not_string_list')
|
||||
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_string_list('not_string_list2')
|
||||
|
||||
cmd.option1 = 'ok,dok'
|
||||
cmd.ensure_string_list('option1')
|
||||
assert cmd.option1 == ['ok', 'dok']
|
||||
|
||||
cmd.option2 = ['xxx', 'www']
|
||||
cmd.ensure_string_list('option2')
|
||||
|
||||
cmd.option3 = ['ok', 2]
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_string_list('option3')
|
||||
|
||||
def test_make_file(self, cmd):
|
||||
# making sure it raises when infiles is not a string or a list/tuple
|
||||
with pytest.raises(TypeError):
|
||||
cmd.make_file(infiles=True, outfile='', func='func', args=())
|
||||
|
||||
# making sure execute gets called properly
|
||||
def _execute(func, args, exec_msg, level):
|
||||
assert exec_msg == 'generating out from in'
|
||||
|
||||
cmd.force = True
|
||||
cmd.execute = _execute
|
||||
cmd.make_file(infiles='in', outfile='out', func='func', args=())
|
||||
|
||||
def test_dump_options(self, cmd):
|
||||
msgs = []
|
||||
|
||||
def _announce(msg, level):
|
||||
msgs.append(msg)
|
||||
|
||||
cmd.announce = _announce
|
||||
cmd.option1 = 1
|
||||
cmd.option2 = 1
|
||||
cmd.user_options = [('option1', '', ''), ('option2', '', '')]
|
||||
cmd.dump_options()
|
||||
|
||||
wanted = ["command options for 'MyCmd':", ' option1 = 1', ' option2 = 1']
|
||||
assert msgs == wanted
|
||||
|
||||
def test_ensure_string(self, cmd):
|
||||
cmd.option1 = 'ok'
|
||||
cmd.ensure_string('option1')
|
||||
|
||||
cmd.option2 = None
|
||||
cmd.ensure_string('option2', 'xxx')
|
||||
assert hasattr(cmd, 'option2')
|
||||
|
||||
cmd.option3 = 1
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_string('option3')
|
||||
|
||||
def test_ensure_filename(self, cmd):
|
||||
cmd.option1 = __file__
|
||||
cmd.ensure_filename('option1')
|
||||
cmd.option2 = 'xxx'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_filename('option2')
|
||||
|
||||
def test_ensure_dirname(self, cmd):
|
||||
cmd.option1 = os.path.dirname(__file__) or os.curdir
|
||||
cmd.ensure_dirname('option1')
|
||||
cmd.option2 = 'xxx'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.ensure_dirname('option2')
|
||||
|
||||
def test_debug_print(self, cmd, capsys, monkeypatch):
|
||||
cmd.debug_print('xxx')
|
||||
assert capsys.readouterr().out == ''
|
||||
monkeypatch.setattr(debug, 'DEBUG', True)
|
||||
cmd.debug_print('xxx')
|
||||
assert capsys.readouterr().out == 'xxx\n'
|
||||
@@ -0,0 +1,87 @@
|
||||
"""Tests for distutils.command.config."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from distutils._log import log
|
||||
from distutils.command.config import config, dump_file
|
||||
from distutils.tests import missing_compiler_executable, support
|
||||
|
||||
import more_itertools
|
||||
import path
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def info_log(request, monkeypatch):
|
||||
self = request.instance
|
||||
self._logs = []
|
||||
monkeypatch.setattr(log, 'info', self._info)
|
||||
|
||||
|
||||
@support.combine_markers
|
||||
class TestConfig(support.TempdirManager):
|
||||
def _info(self, msg, *args):
|
||||
for line in msg.splitlines():
|
||||
self._logs.append(line)
|
||||
|
||||
def test_dump_file(self):
|
||||
this_file = path.Path(__file__).with_suffix('.py')
|
||||
with this_file.open(encoding='utf-8') as f:
|
||||
numlines = more_itertools.ilen(f)
|
||||
|
||||
dump_file(this_file, 'I am the header')
|
||||
assert len(self._logs) == numlines + 1
|
||||
|
||||
@pytest.mark.skipif('platform.system() == "Windows"')
|
||||
def test_search_cpp(self):
|
||||
cmd = missing_compiler_executable(['preprocessor'])
|
||||
if cmd is not None:
|
||||
self.skipTest(f'The {cmd!r} command is not found')
|
||||
pkg_dir, dist = self.create_dist()
|
||||
cmd = config(dist)
|
||||
cmd._check_compiler()
|
||||
compiler = cmd.compiler
|
||||
if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower():
|
||||
self.skipTest(
|
||||
'xlc: The -E option overrides the -P, -o, and -qsyntaxonly options'
|
||||
)
|
||||
|
||||
# simple pattern searches
|
||||
match = cmd.search_cpp(pattern='xxx', body='/* xxx */')
|
||||
assert match == 0
|
||||
|
||||
match = cmd.search_cpp(pattern='_configtest', body='/* xxx */')
|
||||
assert match == 1
|
||||
|
||||
def test_finalize_options(self):
|
||||
# finalize_options does a bit of transformation
|
||||
# on options
|
||||
pkg_dir, dist = self.create_dist()
|
||||
cmd = config(dist)
|
||||
cmd.include_dirs = f'one{os.pathsep}two'
|
||||
cmd.libraries = 'one'
|
||||
cmd.library_dirs = f'three{os.pathsep}four'
|
||||
cmd.ensure_finalized()
|
||||
|
||||
assert cmd.include_dirs == ['one', 'two']
|
||||
assert cmd.libraries == ['one']
|
||||
assert cmd.library_dirs == ['three', 'four']
|
||||
|
||||
def test_clean(self):
|
||||
# _clean removes files
|
||||
tmp_dir = self.mkdtemp()
|
||||
f1 = os.path.join(tmp_dir, 'one')
|
||||
f2 = os.path.join(tmp_dir, 'two')
|
||||
|
||||
self.write_file(f1, 'xxx')
|
||||
self.write_file(f2, 'xxx')
|
||||
|
||||
for f in (f1, f2):
|
||||
assert os.path.exists(f)
|
||||
|
||||
pkg_dir, dist = self.create_dist()
|
||||
cmd = config(dist)
|
||||
cmd._clean(f1, f2)
|
||||
|
||||
for f in (f1, f2):
|
||||
assert not os.path.exists(f)
|
||||
@@ -0,0 +1,130 @@
|
||||
"""Tests for distutils.core."""
|
||||
|
||||
import distutils.core
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from distutils.dist import Distribution
|
||||
|
||||
import pytest
|
||||
|
||||
# setup script that uses __file__
|
||||
setup_using___file__ = """\
|
||||
|
||||
__file__
|
||||
|
||||
from distutils.core import setup
|
||||
setup()
|
||||
"""
|
||||
|
||||
setup_prints_cwd = """\
|
||||
|
||||
import os
|
||||
print(os.getcwd())
|
||||
|
||||
from distutils.core import setup
|
||||
setup()
|
||||
"""
|
||||
|
||||
setup_does_nothing = """\
|
||||
from distutils.core import setup
|
||||
setup()
|
||||
"""
|
||||
|
||||
|
||||
setup_defines_subclass = """\
|
||||
from distutils.core import setup
|
||||
from distutils.command.install import install as _install
|
||||
|
||||
class install(_install):
|
||||
sub_commands = _install.sub_commands + ['cmd']
|
||||
|
||||
setup(cmdclass={'install': install})
|
||||
"""
|
||||
|
||||
setup_within_if_main = """\
|
||||
from distutils.core import setup
|
||||
|
||||
def main():
|
||||
return setup(name="setup_within_if_main")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def save_stdout(monkeypatch):
|
||||
monkeypatch.setattr(sys, 'stdout', sys.stdout)
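# registering sys.stdout with monkeypatch ensures the original stream is
# restored after tests that replace it (e.g. test_run_setup_uses_current_dir)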
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_file(tmp_path):
|
||||
return tmp_path / 'file'
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
@pytest.mark.usefixtures('save_argv')
|
||||
class TestCore:
|
||||
def test_run_setup_provides_file(self, temp_file):
|
||||
# Make sure the script can use __file__; if that's missing, the test
|
||||
# setup.py script will raise NameError.
|
||||
temp_file.write_text(setup_using___file__, encoding='utf-8')
|
||||
distutils.core.run_setup(temp_file)
|
||||
|
||||
def test_run_setup_preserves_sys_argv(self, temp_file):
|
||||
# Make sure run_setup does not clobber sys.argv
|
||||
argv_copy = sys.argv.copy()
|
||||
temp_file.write_text(setup_does_nothing, encoding='utf-8')
|
||||
distutils.core.run_setup(temp_file)
|
||||
assert sys.argv == argv_copy
|
||||
|
||||
def test_run_setup_defines_subclass(self, temp_file):
|
||||
# Make sure the setup script can register a custom command subclass
|
||||
# via cmdclass and that run_setup exposes it on the resulting Distribution.
|
||||
temp_file.write_text(setup_defines_subclass, encoding='utf-8')
|
||||
dist = distutils.core.run_setup(temp_file)
|
||||
install = dist.get_command_obj('install')
|
||||
assert 'cmd' in install.sub_commands
|
||||
|
||||
def test_run_setup_uses_current_dir(self, tmp_path):
|
||||
"""
|
||||
Test that the setup script is run with the current directory
|
||||
as its own current directory.
|
||||
"""
|
||||
sys.stdout = io.StringIO()
|
||||
cwd = os.getcwd()
|
||||
|
||||
# Create a directory and write the setup.py file there:
|
||||
setup_py = tmp_path / 'setup.py'
|
||||
setup_py.write_text(setup_prints_cwd, encoding='utf-8')
|
||||
distutils.core.run_setup(setup_py)
|
||||
|
||||
output = sys.stdout.getvalue()
|
||||
if output.endswith("\n"):
|
||||
output = output[:-1]
|
||||
assert cwd == output
|
||||
|
||||
def test_run_setup_within_if_main(self, temp_file):
|
||||
temp_file.write_text(setup_within_if_main, encoding='utf-8')
|
||||
dist = distutils.core.run_setup(temp_file, stop_after="config")
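# stop_after="config" halts run_setup once the configuration files have been
# parsed, before the command line is processed or any commands are run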
|
||||
assert isinstance(dist, Distribution)
|
||||
assert dist.get_name() == "setup_within_if_main"
|
||||
|
||||
def test_run_commands(self, temp_file):
|
||||
sys.argv = ['setup.py', 'build']
|
||||
temp_file.write_text(setup_within_if_main, encoding='utf-8')
|
||||
dist = distutils.core.run_setup(temp_file, stop_after="commandline")
|
||||
assert 'build' not in dist.have_run
|
||||
distutils.core.run_commands(dist)
|
||||
assert 'build' in dist.have_run
|
||||
|
||||
def test_debug_mode(self, capsys, monkeypatch):
|
||||
# this covers the code called when DEBUG is set
|
||||
sys.argv = ['setup.py', '--name']
|
||||
distutils.core.setup(name='bar')
|
||||
assert capsys.readouterr().out == 'bar\n'
|
||||
monkeypatch.setattr(distutils.core, 'DEBUG', True)
|
||||
distutils.core.setup(name='bar')
|
||||
wanted = "options (after parsing config files):\n"
|
||||
assert capsys.readouterr().out.startswith(wanted)
|
||||
@@ -0,0 +1,81 @@
|
||||
"""Tests for distutils.cygwinccompiler."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from distutils import sysconfig
|
||||
from distutils.cygwinccompiler import (
|
||||
CONFIG_H_NOTOK,
|
||||
CONFIG_H_OK,
|
||||
CONFIG_H_UNCERTAIN,
|
||||
check_config_h,
|
||||
get_msvcr,
|
||||
)
|
||||
from distutils.tests import support
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def stuff(request, monkeypatch, distutils_managed_tempdir):
|
||||
self = request.instance
|
||||
self.python_h = os.path.join(self.mkdtemp(), 'python.h')
|
||||
monkeypatch.setattr(sysconfig, 'get_config_h_filename', self._get_config_h_filename)
|
||||
monkeypatch.setattr(sys, 'version', sys.version)
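# registering sys.version with monkeypatch lets pytest restore the real value
# after the tests below overwrite it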
|
||||
|
||||
|
||||
class TestCygwinCCompiler(support.TempdirManager):
|
||||
def _get_config_h_filename(self):
|
||||
return self.python_h
|
||||
|
||||
@pytest.mark.skipif('sys.platform != "cygwin"')
|
||||
@pytest.mark.skipif('not os.path.exists("/usr/lib/libbash.dll.a")')
|
||||
def test_find_library_file(self):
|
||||
from distutils.cygwinccompiler import CygwinCCompiler
|
||||
|
||||
compiler = CygwinCCompiler()
|
||||
link_name = "bash"
|
||||
linkable_file = compiler.find_library_file(["/usr/lib"], link_name)
|
||||
assert linkable_file is not None
|
||||
assert os.path.exists(linkable_file)
|
||||
assert linkable_file == f"/usr/lib/lib{link_name:s}.dll.a"
|
||||
|
||||
@pytest.mark.skipif('sys.platform != "cygwin"')
|
||||
def test_runtime_library_dir_option(self):
|
||||
from distutils.cygwinccompiler import CygwinCCompiler
|
||||
|
||||
compiler = CygwinCCompiler()
|
||||
assert compiler.runtime_library_dir_option('/foo') == []
|
||||
|
||||
def test_check_config_h(self):
|
||||
# check_config_h looks for "GCC" in sys.version first
|
||||
# returns CONFIG_H_OK if found
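# check_config_h() returns a (status, details) tuple, hence the [0] indexing
# below; a minimal sketch of how a caller might use it (hypothetical code,
# not part of this test):
#   status, details = check_config_h()
#   if status != CONFIG_H_OK:
#       print("pyconfig.h was not built by GCC:", details)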
|
||||
sys.version = (
|
||||
'2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
|
||||
'4.0.1 (Apple Computer, Inc. build 5370)]'
|
||||
)
|
||||
|
||||
assert check_config_h()[0] == CONFIG_H_OK
|
||||
|
||||
# then it tries to see if it can find "__GNUC__" in pyconfig.h
|
||||
sys.version = 'something without the *CC word'
|
||||
|
||||
# if the file doesn't exist it returns CONFIG_H_UNCERTAIN
|
||||
assert check_config_h()[0] == CONFIG_H_UNCERTAIN
|
||||
|
||||
# if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
|
||||
self.write_file(self.python_h, 'xxx')
|
||||
assert check_config_h()[0] == CONFIG_H_NOTOK
|
||||
|
||||
# and CONFIG_H_OK if __GNUC__ is found
|
||||
self.write_file(self.python_h, 'xxx __GNUC__ xxx')
|
||||
assert check_config_h()[0] == CONFIG_H_OK
|
||||
|
||||
def test_get_msvcr(self):
|
||||
assert get_msvcr() == []
|
||||
|
||||
@pytest.mark.skipif('sys.platform != "cygwin"')
|
||||
def test_dll_libraries_not_none(self):
|
||||
from distutils.cygwinccompiler import CygwinCCompiler
|
||||
|
||||
compiler = CygwinCCompiler()
|
||||
assert compiler.dll_libraries is not None
|
||||
@@ -0,0 +1,134 @@
|
||||
"""Tests for distutils.dir_util."""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import stat
|
||||
import unittest.mock as mock
|
||||
from distutils import dir_util, errors
|
||||
from distutils.dir_util import (
|
||||
copy_tree,
|
||||
create_tree,
|
||||
ensure_relative,
|
||||
mkpath,
|
||||
remove_tree,
|
||||
)
|
||||
from distutils.tests import support
|
||||
|
||||
import jaraco.path
|
||||
import path
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def stuff(request, monkeypatch, distutils_managed_tempdir):
|
||||
self = request.instance
|
||||
tmp_dir = self.mkdtemp()
|
||||
self.root_target = os.path.join(tmp_dir, 'deep')
|
||||
self.target = os.path.join(self.root_target, 'here')
|
||||
self.target2 = os.path.join(tmp_dir, 'deep2')
|
||||
|
||||
|
||||
class TestDirUtil(support.TempdirManager):
|
||||
def test_mkpath_remove_tree_verbosity(self, caplog):
|
||||
mkpath(self.target, verbose=False)
|
||||
assert not caplog.records
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
|
||||
mkpath(self.target, verbose=True)
|
||||
wanted = [f'creating {self.target}']
|
||||
assert caplog.messages == wanted
|
||||
caplog.clear()
|
||||
|
||||
remove_tree(self.root_target, verbose=True)
|
||||
wanted = [f"removing '{self.root_target}' (and everything under it)"]
|
||||
assert caplog.messages == wanted
|
||||
|
||||
@pytest.mark.skipif("platform.system() == 'Windows'")
|
||||
def test_mkpath_with_custom_mode(self):
|
||||
# Get and set the current umask value for testing mode bits.
|
||||
umask = os.umask(0o002)
|
||||
os.umask(umask)
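# os.mkdir applies the process umask, so the expected permission bits are
# masked with ~umask in the assertions below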
|
||||
mkpath(self.target, 0o700)
|
||||
assert stat.S_IMODE(os.stat(self.target).st_mode) == 0o700 & ~umask
|
||||
mkpath(self.target2, 0o555)
|
||||
assert stat.S_IMODE(os.stat(self.target2).st_mode) == 0o555 & ~umask
|
||||
|
||||
def test_create_tree_verbosity(self, caplog):
|
||||
create_tree(self.root_target, ['one', 'two', 'three'], verbose=False)
|
||||
assert caplog.messages == []
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
|
||||
wanted = [f'creating {self.root_target}']
|
||||
create_tree(self.root_target, ['one', 'two', 'three'], verbose=True)
|
||||
assert caplog.messages == wanted
|
||||
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
|
||||
def test_copy_tree_verbosity(self, caplog):
|
||||
mkpath(self.target, verbose=False)
|
||||
|
||||
copy_tree(self.target, self.target2, verbose=False)
|
||||
assert caplog.messages == []
|
||||
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
|
||||
mkpath(self.target, verbose=False)
|
||||
a_file = path.Path(self.target) / 'ok.txt'
|
||||
jaraco.path.build({'ok.txt': 'some content'}, self.target)
|
||||
|
||||
wanted = [f'copying {a_file} -> {self.target2}']
|
||||
copy_tree(self.target, self.target2, verbose=True)
|
||||
assert caplog.messages == wanted
|
||||
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
remove_tree(self.target2, verbose=False)
|
||||
|
||||
def test_copy_tree_skips_nfs_temp_files(self):
|
||||
mkpath(self.target, verbose=False)
|
||||
|
||||
jaraco.path.build({'ok.txt': 'some content', '.nfs123abc': ''}, self.target)
|
||||
|
||||
copy_tree(self.target, self.target2)
|
||||
assert os.listdir(self.target2) == ['ok.txt']
|
||||
|
||||
remove_tree(self.root_target, verbose=False)
|
||||
remove_tree(self.target2, verbose=False)
|
||||
|
||||
def test_ensure_relative(self):
|
||||
if os.sep == '/':
|
||||
assert ensure_relative('/home/foo') == 'home/foo'
|
||||
assert ensure_relative('some/path') == 'some/path'
|
||||
else: # \\
|
||||
assert ensure_relative('c:\\home\\foo') == 'c:home\\foo'
|
||||
assert ensure_relative('home\\foo') == 'home\\foo'
|
||||
|
||||
def test_copy_tree_exception_in_listdir(self):
|
||||
"""
|
||||
An exception in listdir should raise a DistutilsFileError
|
||||
"""
|
||||
with mock.patch("os.listdir", side_effect=OSError()), pytest.raises(
|
||||
errors.DistutilsFileError
|
||||
):
|
||||
src = self.tempdirs[-1]
|
||||
dir_util.copy_tree(src, None)
|
||||
|
||||
def test_mkpath_exception_uncached(self, monkeypatch, tmp_path):
|
||||
"""
|
||||
Caching should not remember failed attempts.
|
||||
|
||||
pypa/distutils#304
|
||||
"""
|
||||
|
||||
class FailPath(pathlib.Path):
|
||||
def mkdir(self, *args, **kwargs):
|
||||
raise OSError("Failed to create directory")
|
||||
|
||||
target = tmp_path / 'foodir'
|
||||
|
||||
with pytest.raises(errors.DistutilsFileError):
|
||||
mkpath(FailPath(target))
|
||||
|
||||
assert not target.exists()
|
||||
|
||||
mkpath(target)
|
||||
assert target.exists()
|
||||
@@ -0,0 +1,545 @@
|
||||
"""Tests for distutils.dist."""
|
||||
|
||||
import email
|
||||
import email.generator
|
||||
import email.policy
|
||||
import functools
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
import unittest.mock as mock
|
||||
import warnings
|
||||
from distutils.cmd import Command
|
||||
from distutils.dist import Distribution, fix_help_options
|
||||
from distutils.tests import support
|
||||
|
||||
import jaraco.path
|
||||
import pytest
|
||||
|
||||
pydistutils_cfg = '.' * (os.name == 'posix') + 'pydistutils.cfg'
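# on POSIX the per-user config is a dot-file (~/.pydistutils.cfg); elsewhere it
# is plain pydistutils.cfg in the user's home directory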
|
||||
|
||||
|
||||
class test_dist(Command):
|
||||
"""Sample distutils extension command."""
|
||||
|
||||
user_options = [
|
||||
("sample-option=", "S", "help text"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.sample_option = None
|
||||
|
||||
|
||||
class TestDistribution(Distribution):
|
||||
"""Distribution subclasses that avoids the default search for
|
||||
configuration files.
|
||||
|
||||
The ._config_files attribute must be set before
|
||||
.parse_config_files() is called.
|
||||
"""
|
||||
|
||||
def find_config_files(self):
|
||||
return self._config_files
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def clear_argv():
|
||||
del sys.argv[1:]
|
||||
|
||||
|
||||
@support.combine_markers
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
@pytest.mark.usefixtures('save_argv')
|
||||
class TestDistributionBehavior(support.TempdirManager):
|
||||
def create_distribution(self, configfiles=()):
|
||||
d = TestDistribution()
|
||||
d._config_files = configfiles
|
||||
d.parse_config_files()
|
||||
d.parse_command_line()
|
||||
return d
|
||||
|
||||
def test_command_packages_unspecified(self, clear_argv):
|
||||
sys.argv.append("build")
|
||||
d = self.create_distribution()
|
||||
assert d.get_command_packages() == ["distutils.command"]
|
||||
|
||||
def test_command_packages_cmdline(self, clear_argv):
|
||||
from distutils.tests.test_dist import test_dist
|
||||
|
||||
sys.argv.extend([
|
||||
"--command-packages",
|
||||
"foo.bar,distutils.tests",
|
||||
"test_dist",
|
||||
"-Ssometext",
|
||||
])
|
||||
d = self.create_distribution()
|
||||
# let's actually try to load our test command:
|
||||
assert d.get_command_packages() == [
|
||||
"distutils.command",
|
||||
"foo.bar",
|
||||
"distutils.tests",
|
||||
]
|
||||
cmd = d.get_command_obj("test_dist")
|
||||
assert isinstance(cmd, test_dist)
|
||||
assert cmd.sample_option == "sometext"
|
||||
|
||||
@pytest.mark.skipif(
|
||||
'distutils' not in Distribution.parse_config_files.__module__,
|
||||
reason='Cannot test when virtualenv has monkey-patched Distribution',
|
||||
)
|
||||
def test_venv_install_options(self, tmp_path, clear_argv):
|
||||
sys.argv.append("install")
|
||||
file = str(tmp_path / 'file')
|
||||
|
||||
fakepath = '/somedir'
|
||||
|
||||
jaraco.path.build({
|
||||
file: f"""
|
||||
[install]
|
||||
install-base = {fakepath}
|
||||
install-platbase = {fakepath}
|
||||
install-lib = {fakepath}
|
||||
install-platlib = {fakepath}
|
||||
install-purelib = {fakepath}
|
||||
install-headers = {fakepath}
|
||||
install-scripts = {fakepath}
|
||||
install-data = {fakepath}
|
||||
prefix = {fakepath}
|
||||
exec-prefix = {fakepath}
|
||||
home = {fakepath}
|
||||
user = {fakepath}
|
||||
root = {fakepath}
|
||||
""",
|
||||
})
|
||||
|
||||
# Base case: Not in a Virtual Environment
|
||||
with mock.patch.multiple(sys, prefix='/a', base_prefix='/a'):
|
||||
d = self.create_distribution([file])
|
||||
|
||||
option_tuple = (file, fakepath)
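# Distribution.command_options maps each option to a (source, value) pair,
# where the source here is the config file the value came from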
|
||||
|
||||
result_dict = {
|
||||
'install_base': option_tuple,
|
||||
'install_platbase': option_tuple,
|
||||
'install_lib': option_tuple,
|
||||
'install_platlib': option_tuple,
|
||||
'install_purelib': option_tuple,
|
||||
'install_headers': option_tuple,
|
||||
'install_scripts': option_tuple,
|
||||
'install_data': option_tuple,
|
||||
'prefix': option_tuple,
|
||||
'exec_prefix': option_tuple,
|
||||
'home': option_tuple,
|
||||
'user': option_tuple,
|
||||
'root': option_tuple,
|
||||
}
|
||||
|
||||
assert sorted(d.command_options.get('install').keys()) == sorted(
|
||||
result_dict.keys()
|
||||
)
|
||||
|
||||
for key, value in d.command_options.get('install').items():
|
||||
assert value == result_dict[key]
|
||||
|
||||
# Test case: In a Virtual Environment
|
||||
with mock.patch.multiple(sys, prefix='/a', base_prefix='/b'):
|
||||
d = self.create_distribution([file])
|
||||
|
||||
for key in result_dict.keys():
|
||||
assert key not in d.command_options.get('install', {})
|
||||
|
||||
def test_command_packages_configfile(self, tmp_path, clear_argv):
|
||||
sys.argv.append("build")
|
||||
file = str(tmp_path / "file")
|
||||
jaraco.path.build({
|
||||
file: """
|
||||
[global]
|
||||
command_packages = foo.bar, splat
|
||||
""",
|
||||
})
|
||||
|
||||
d = self.create_distribution([file])
|
||||
assert d.get_command_packages() == ["distutils.command", "foo.bar", "splat"]
|
||||
|
||||
# ensure command line overrides config:
|
||||
sys.argv[1:] = ["--command-packages", "spork", "build"]
|
||||
d = self.create_distribution([file])
|
||||
assert d.get_command_packages() == ["distutils.command", "spork"]
|
||||
|
||||
# Setting --command-packages to '' should cause the default to
|
||||
# be used even if a config file specified something else:
|
||||
sys.argv[1:] = ["--command-packages", "", "build"]
|
||||
d = self.create_distribution([file])
|
||||
assert d.get_command_packages() == ["distutils.command"]
|
||||
|
||||
def test_empty_options(self, request):
|
||||
# an empty options dictionary should not stay in the
|
||||
# list of attributes
|
||||
|
||||
# catching warnings
|
||||
warns = []
|
||||
|
||||
def _warn(msg):
|
||||
warns.append(msg)
|
||||
|
||||
request.addfinalizer(
|
||||
functools.partial(setattr, warnings, 'warn', warnings.warn)
|
||||
)
|
||||
warnings.warn = _warn
|
||||
dist = Distribution(
|
||||
attrs={
|
||||
'author': 'xxx',
|
||||
'name': 'xxx',
|
||||
'version': 'xxx',
|
||||
'url': 'xxxx',
|
||||
'options': {},
|
||||
}
|
||||
)
|
||||
|
||||
assert len(warns) == 0
|
||||
assert 'options' not in dir(dist)
|
||||
|
||||
def test_finalize_options(self):
|
||||
attrs = {'keywords': 'one,two', 'platforms': 'one,two'}
|
||||
|
||||
dist = Distribution(attrs=attrs)
|
||||
dist.finalize_options()
|
||||
|
||||
# finalize_options splits platforms and keywords
|
||||
assert dist.metadata.platforms == ['one', 'two']
|
||||
assert dist.metadata.keywords == ['one', 'two']
|
||||
|
||||
attrs = {'keywords': 'foo bar', 'platforms': 'foo bar'}
|
||||
dist = Distribution(attrs=attrs)
|
||||
dist.finalize_options()
|
||||
assert dist.metadata.platforms == ['foo bar']
|
||||
assert dist.metadata.keywords == ['foo bar']
|
||||
|
||||
def test_get_command_packages(self):
|
||||
dist = Distribution()
|
||||
assert dist.command_packages is None
|
||||
cmds = dist.get_command_packages()
|
||||
assert cmds == ['distutils.command']
|
||||
assert dist.command_packages == ['distutils.command']
|
||||
|
||||
dist.command_packages = 'one,two'
|
||||
cmds = dist.get_command_packages()
|
||||
assert cmds == ['distutils.command', 'one', 'two']
|
||||
|
||||
def test_announce(self):
|
||||
# make sure the level is known
|
||||
dist = Distribution()
|
||||
with pytest.raises(TypeError):
|
||||
dist.announce('ok', level='ok2')
|
||||
|
||||
def test_find_config_files_disable(self, temp_home):
|
||||
# Ticket #1180: Allow user to disable their home config file.
|
||||
jaraco.path.build({pydistutils_cfg: '[distutils]\n'}, temp_home)
|
||||
|
||||
d = Distribution()
|
||||
all_files = d.find_config_files()
|
||||
|
||||
d = Distribution(attrs={'script_args': ['--no-user-cfg']})
|
||||
files = d.find_config_files()
|
||||
|
||||
# make sure --no-user-cfg disables the user cfg file
|
||||
assert len(all_files) - 1 == len(files)
|
||||
|
||||
@pytest.mark.skipif(
|
||||
'platform.system() == "Windows"',
|
||||
reason='Windows does not honor chmod 000',
|
||||
)
|
||||
def test_find_config_files_permission_error(self, fake_home):
|
||||
"""
|
||||
Finding config files should not fail when the directory is inaccessible.
|
||||
"""
|
||||
fake_home.joinpath(pydistutils_cfg).write_text('', encoding='utf-8')
|
||||
fake_home.chmod(0o000)
|
||||
Distribution().find_config_files()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
@pytest.mark.usefixtures('save_argv')
|
||||
class TestMetadata(support.TempdirManager):
|
||||
def format_metadata(self, dist):
|
||||
sio = io.StringIO()
|
||||
dist.metadata.write_pkg_file(sio)
|
||||
return sio.getvalue()
|
||||
|
||||
def test_simple_metadata(self):
|
||||
attrs = {"name": "package", "version": "1.0"}
|
||||
dist = Distribution(attrs)
|
||||
meta = self.format_metadata(dist)
|
||||
assert "Metadata-Version: 1.0" in meta
|
||||
assert "provides:" not in meta.lower()
|
||||
assert "requires:" not in meta.lower()
|
||||
assert "obsoletes:" not in meta.lower()
|
||||
|
||||
def test_provides(self):
|
||||
attrs = {
|
||||
"name": "package",
|
||||
"version": "1.0",
|
||||
"provides": ["package", "package.sub"],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.metadata.get_provides() == ["package", "package.sub"]
|
||||
assert dist.get_provides() == ["package", "package.sub"]
|
||||
meta = self.format_metadata(dist)
|
||||
assert "Metadata-Version: 1.1" in meta
|
||||
assert "requires:" not in meta.lower()
|
||||
assert "obsoletes:" not in meta.lower()
|
||||
|
||||
def test_provides_illegal(self):
|
||||
with pytest.raises(ValueError):
|
||||
Distribution(
|
||||
{"name": "package", "version": "1.0", "provides": ["my.pkg (splat)"]},
|
||||
)
|
||||
|
||||
def test_requires(self):
|
||||
attrs = {
|
||||
"name": "package",
|
||||
"version": "1.0",
|
||||
"requires": ["other", "another (==1.0)"],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.metadata.get_requires() == ["other", "another (==1.0)"]
|
||||
assert dist.get_requires() == ["other", "another (==1.0)"]
|
||||
meta = self.format_metadata(dist)
|
||||
assert "Metadata-Version: 1.1" in meta
|
||||
assert "provides:" not in meta.lower()
|
||||
assert "Requires: other" in meta
|
||||
assert "Requires: another (==1.0)" in meta
|
||||
assert "obsoletes:" not in meta.lower()
|
||||
|
||||
def test_requires_illegal(self):
|
||||
with pytest.raises(ValueError):
|
||||
Distribution(
|
||||
{"name": "package", "version": "1.0", "requires": ["my.pkg (splat)"]},
|
||||
)
|
||||
|
||||
def test_requires_to_list(self):
|
||||
attrs = {"name": "package", "requires": iter(["other"])}
|
||||
dist = Distribution(attrs)
|
||||
assert isinstance(dist.metadata.requires, list)
|
||||
|
||||
def test_obsoletes(self):
|
||||
attrs = {
|
||||
"name": "package",
|
||||
"version": "1.0",
|
||||
"obsoletes": ["other", "another (<1.0)"],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.metadata.get_obsoletes() == ["other", "another (<1.0)"]
|
||||
assert dist.get_obsoletes() == ["other", "another (<1.0)"]
|
||||
meta = self.format_metadata(dist)
|
||||
assert "Metadata-Version: 1.1" in meta
|
||||
assert "provides:" not in meta.lower()
|
||||
assert "requires:" not in meta.lower()
|
||||
assert "Obsoletes: other" in meta
|
||||
assert "Obsoletes: another (<1.0)" in meta
|
||||
|
||||
def test_obsoletes_illegal(self):
|
||||
with pytest.raises(ValueError):
|
||||
Distribution(
|
||||
{"name": "package", "version": "1.0", "obsoletes": ["my.pkg (splat)"]},
|
||||
)
|
||||
|
||||
def test_obsoletes_to_list(self):
|
||||
attrs = {"name": "package", "obsoletes": iter(["other"])}
|
||||
dist = Distribution(attrs)
|
||||
assert isinstance(dist.metadata.obsoletes, list)
|
||||
|
||||
def test_classifier(self):
|
||||
attrs = {
|
||||
'name': 'Boa',
|
||||
'version': '3.0',
|
||||
'classifiers': ['Programming Language :: Python :: 3'],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.get_classifiers() == ['Programming Language :: Python :: 3']
|
||||
meta = self.format_metadata(dist)
|
||||
assert 'Metadata-Version: 1.1' in meta
|
||||
|
||||
def test_classifier_invalid_type(self, caplog):
|
||||
attrs = {
|
||||
'name': 'Boa',
|
||||
'version': '3.0',
|
||||
'classifiers': ('Programming Language :: Python :: 3',),
|
||||
}
|
||||
d = Distribution(attrs)
|
||||
# should have warning about passing a non-list
|
||||
assert 'should be a list' in caplog.messages[0]
|
||||
# should be converted to a list
|
||||
assert isinstance(d.metadata.classifiers, list)
|
||||
assert d.metadata.classifiers == list(attrs['classifiers'])
|
||||
|
||||
def test_keywords(self):
|
||||
attrs = {
|
||||
'name': 'Monty',
|
||||
'version': '1.0',
|
||||
'keywords': ['spam', 'eggs', 'life of brian'],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.get_keywords() == ['spam', 'eggs', 'life of brian']
|
||||
|
||||
def test_keywords_invalid_type(self, caplog):
|
||||
attrs = {
|
||||
'name': 'Monty',
|
||||
'version': '1.0',
|
||||
'keywords': ('spam', 'eggs', 'life of brian'),
|
||||
}
|
||||
d = Distribution(attrs)
|
||||
# should have warning about passing a non-list
|
||||
assert 'should be a list' in caplog.messages[0]
|
||||
# should be converted to a list
|
||||
assert isinstance(d.metadata.keywords, list)
|
||||
assert d.metadata.keywords == list(attrs['keywords'])
|
||||
|
||||
def test_platforms(self):
|
||||
attrs = {
|
||||
'name': 'Monty',
|
||||
'version': '1.0',
|
||||
'platforms': ['GNU/Linux', 'Some Evil Platform'],
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
assert dist.get_platforms() == ['GNU/Linux', 'Some Evil Platform']
|
||||
|
||||
def test_platforms_invalid_types(self, caplog):
|
||||
attrs = {
|
||||
'name': 'Monty',
|
||||
'version': '1.0',
|
||||
'platforms': ('GNU/Linux', 'Some Evil Platform'),
|
||||
}
|
||||
d = Distribution(attrs)
|
||||
# should have warning about passing a non-list
|
||||
assert 'should be a list' in caplog.messages[0]
|
||||
# should be converted to a list
|
||||
assert isinstance(d.metadata.platforms, list)
|
||||
assert d.metadata.platforms == list(attrs['platforms'])
|
||||
|
||||
def test_download_url(self):
|
||||
attrs = {
|
||||
'name': 'Boa',
|
||||
'version': '3.0',
|
||||
'download_url': 'http://example.org/boa',
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
meta = self.format_metadata(dist)
|
||||
assert 'Metadata-Version: 1.1' in meta
|
||||
|
||||
def test_long_description(self):
|
||||
long_desc = textwrap.dedent(
|
||||
"""\
|
||||
example::
|
||||
We start here
|
||||
and continue here
|
||||
and end here."""
|
||||
)
|
||||
attrs = {"name": "package", "version": "1.0", "long_description": long_desc}
|
||||
|
||||
dist = Distribution(attrs)
|
||||
meta = self.format_metadata(dist)
|
||||
meta = meta.replace('\n' + 8 * ' ', '\n')
|
||||
assert long_desc in meta
|
||||
|
||||
def test_custom_pydistutils(self, temp_home):
|
||||
"""
|
||||
pydistutils.cfg is found
|
||||
"""
|
||||
jaraco.path.build({pydistutils_cfg: ''}, temp_home)
|
||||
config_path = temp_home / pydistutils_cfg
|
||||
|
||||
assert str(config_path) in Distribution().find_config_files()
|
||||
|
||||
def test_extra_pydistutils(self, monkeypatch, tmp_path):
|
||||
jaraco.path.build({'overrides.cfg': ''}, tmp_path)
|
||||
filename = tmp_path / 'overrides.cfg'
|
||||
monkeypatch.setenv('DIST_EXTRA_CONFIG', str(filename))
|
||||
assert str(filename) in Distribution().find_config_files()
|
||||
|
||||
def test_fix_help_options(self):
|
||||
help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)]
|
||||
fancy_options = fix_help_options(help_tuples)
|
||||
assert fancy_options[0] == ('a', 'b', 'c')
|
||||
assert fancy_options[1] == (1, 2, 3)
|
||||
|
||||
def test_show_help(self, request, capsys):
|
||||
# smoke test, just makes sure some help is displayed
|
||||
dist = Distribution()
|
||||
sys.argv = []
|
||||
dist.help = True
|
||||
dist.script_name = 'setup.py'
|
||||
dist.parse_command_line()
|
||||
|
||||
output = [
|
||||
line for line in capsys.readouterr().out.split('\n') if line.strip() != ''
|
||||
]
|
||||
assert output
|
||||
|
||||
def test_read_metadata(self):
|
||||
attrs = {
|
||||
"name": "package",
|
||||
"version": "1.0",
|
||||
"long_description": "desc",
|
||||
"description": "xxx",
|
||||
"download_url": "http://example.com",
|
||||
"keywords": ['one', 'two'],
|
||||
"requires": ['foo'],
|
||||
}
|
||||
|
||||
dist = Distribution(attrs)
|
||||
metadata = dist.metadata
|
||||
|
||||
# write it out, then read it back in
|
||||
PKG_INFO = io.StringIO()
|
||||
metadata.write_pkg_file(PKG_INFO)
|
||||
PKG_INFO.seek(0)
|
||||
metadata.read_pkg_file(PKG_INFO)
|
||||
|
||||
assert metadata.name == "package"
|
||||
assert metadata.version == "1.0"
|
||||
assert metadata.description == "xxx"
|
||||
assert metadata.download_url == 'http://example.com'
|
||||
assert metadata.keywords == ['one', 'two']
|
||||
assert metadata.platforms is None
|
||||
assert metadata.obsoletes is None
|
||||
assert metadata.requires == ['foo']
|
||||
|
||||
def test_round_trip_through_email_generator(self):
|
||||
"""
|
||||
In pypa/setuptools#4033, it was shown that once PKG-INFO is
|
||||
re-generated using ``email.generator.Generator``, some control
|
||||
characters might cause problems.
|
||||
"""
|
||||
# Given a PKG-INFO file ...
|
||||
attrs = {
|
||||
"name": "package",
|
||||
"version": "1.0",
|
||||
"long_description": "hello\x0b\nworld\n",
|
||||
}
|
||||
dist = Distribution(attrs)
|
||||
metadata = dist.metadata
|
||||
|
||||
with io.StringIO() as buffer:
|
||||
metadata.write_pkg_file(buffer)
|
||||
msg = buffer.getvalue()
|
||||
|
||||
# ... when it is read and re-written using stdlib's email library,
|
||||
orig = email.message_from_string(msg)
|
||||
policy = email.policy.EmailPolicy(
|
||||
utf8=True,
|
||||
mangle_from_=False,
|
||||
max_line_length=0,
|
||||
)
|
||||
with io.StringIO() as buffer:
|
||||
email.generator.Generator(buffer, policy=policy).flatten(orig)
|
||||
|
||||
buffer.seek(0)
|
||||
regen = email.message_from_file(buffer)
|
||||
|
||||
# ... then it should be the same as the original
|
||||
# (except for the specific line break characters)
|
||||
orig_desc = set(orig["Description"].splitlines())
|
||||
regen_desc = set(regen["Description"].splitlines())
|
||||
assert regen_desc == orig_desc
|
||||
@@ -0,0 +1,108 @@
|
||||
"""Tests for distutils.extension."""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import warnings
|
||||
from distutils.extension import Extension, read_setup_file
|
||||
|
||||
import pytest
|
||||
|
||||
from .compat.py38 import check_warnings
|
||||
|
||||
|
||||
class TestExtension:
|
||||
def test_read_setup_file(self):
|
||||
# trying to read a Setup file
|
||||
# (sample extracted from the PyGame project)
|
||||
setup = os.path.join(os.path.dirname(__file__), 'Setup.sample')
|
||||
|
||||
exts = read_setup_file(setup)
|
||||
names = [ext.name for ext in exts]
|
||||
names.sort()
|
||||
|
||||
# here are the extensions read_setup_file should have created
|
||||
# out of the file
|
||||
wanted = [
|
||||
'_arraysurfarray',
|
||||
'_camera',
|
||||
'_numericsndarray',
|
||||
'_numericsurfarray',
|
||||
'base',
|
||||
'bufferproxy',
|
||||
'cdrom',
|
||||
'color',
|
||||
'constants',
|
||||
'display',
|
||||
'draw',
|
||||
'event',
|
||||
'fastevent',
|
||||
'font',
|
||||
'gfxdraw',
|
||||
'image',
|
||||
'imageext',
|
||||
'joystick',
|
||||
'key',
|
||||
'mask',
|
||||
'mixer',
|
||||
'mixer_music',
|
||||
'mouse',
|
||||
'movie',
|
||||
'overlay',
|
||||
'pixelarray',
|
||||
'pypm',
|
||||
'rect',
|
||||
'rwobject',
|
||||
'scrap',
|
||||
'surface',
|
||||
'surflock',
|
||||
'time',
|
||||
'transform',
|
||||
]
|
||||
|
||||
assert names == wanted
|
||||
|
||||
def test_extension_init(self):
|
||||
# the first argument, which is the name, must be a string
|
||||
with pytest.raises(AssertionError):
|
||||
Extension(1, [])
|
||||
ext = Extension('name', [])
|
||||
assert ext.name == 'name'
|
||||
|
||||
# the second argument, which is the list of files, must
|
||||
# be a list of strings or PathLike objects
|
||||
with pytest.raises(AssertionError):
|
||||
Extension('name', 'file')
|
||||
with pytest.raises(AssertionError):
|
||||
Extension('name', ['file', 1])
|
||||
ext = Extension('name', ['file1', 'file2'])
|
||||
assert ext.sources == ['file1', 'file2']
|
||||
ext = Extension('name', [pathlib.Path('file1'), pathlib.Path('file2')])
|
||||
assert ext.sources == ['file1', 'file2']
|
||||
|
||||
# other arguments have defaults
|
||||
for attr in (
|
||||
'include_dirs',
|
||||
'define_macros',
|
||||
'undef_macros',
|
||||
'library_dirs',
|
||||
'libraries',
|
||||
'runtime_library_dirs',
|
||||
'extra_objects',
|
||||
'extra_compile_args',
|
||||
'extra_link_args',
|
||||
'export_symbols',
|
||||
'swig_opts',
|
||||
'depends',
|
||||
):
|
||||
assert getattr(ext, attr) == []
|
||||
|
||||
assert ext.language is None
|
||||
assert ext.optional is None
|
||||
|
||||
# if there are unknown keyword options, warn about them
|
||||
with check_warnings() as w:
|
||||
warnings.simplefilter('always')
|
||||
ext = Extension('name', ['file1', 'file2'], chic=True)
|
||||
|
||||
assert len(w.warnings) == 1
|
||||
assert str(w.warnings[0].message) == "Unknown Extension options: 'chic'"
|
||||
@@ -0,0 +1,94 @@
"""Tests for distutils.file_util."""

import errno
import os
import unittest.mock as mock
from distutils.errors import DistutilsFileError
from distutils.file_util import copy_file, move_file

import jaraco.path
import pytest


@pytest.fixture(autouse=True)
def stuff(request, tmp_path):
    self = request.instance
    self.source = tmp_path / 'f1'
    self.target = tmp_path / 'f2'
    self.target_dir = tmp_path / 'd1'


class TestFileUtil:
    def test_move_file_verbosity(self, caplog):
        jaraco.path.build({self.source: 'some content'})

        move_file(self.source, self.target, verbose=False)
        assert not caplog.messages

        # back to original state
        move_file(self.target, self.source, verbose=False)

        move_file(self.source, self.target, verbose=True)
        wanted = [f'moving {self.source} -> {self.target}']
        assert caplog.messages == wanted

        # back to original state
        move_file(self.target, self.source, verbose=False)

        caplog.clear()
        # now the target is a dir
        os.mkdir(self.target_dir)
        move_file(self.source, self.target_dir, verbose=True)
        wanted = [f'moving {self.source} -> {self.target_dir}']
        assert caplog.messages == wanted

    def test_move_file_exception_unpacking_rename(self):
        # see issue 22182
        with mock.patch("os.rename", side_effect=OSError("wrong", 1)), pytest.raises(
            DistutilsFileError
        ):
            jaraco.path.build({self.source: 'spam eggs'})
            move_file(self.source, self.target, verbose=False)

    def test_move_file_exception_unpacking_unlink(self):
        # see issue 22182
        with mock.patch(
            "os.rename", side_effect=OSError(errno.EXDEV, "wrong")
        ), mock.patch("os.unlink", side_effect=OSError("wrong", 1)), pytest.raises(
            DistutilsFileError
        ):
            jaraco.path.build({self.source: 'spam eggs'})
            move_file(self.source, self.target, verbose=False)

    def test_copy_file_hard_link(self):
        jaraco.path.build({self.source: 'some content'})
        # Check first that copy_file() will not fall back on copying the file
        # instead of creating the hard link.
        try:
            os.link(self.source, self.target)
        except OSError as e:
            self.skipTest(f'os.link: {e}')
        else:
            self.target.unlink()
        st = os.stat(self.source)
        copy_file(self.source, self.target, link='hard')
        st2 = os.stat(self.source)
        st3 = os.stat(self.target)
        assert os.path.samestat(st, st2), (st, st2)
        assert os.path.samestat(st2, st3), (st2, st3)
        assert self.source.read_text(encoding='utf-8') == 'some content'

    def test_copy_file_hard_link_failure(self):
        # If hard linking fails, copy_file() falls back on copying file
        # (some special filesystems don't support hard linking even under
        # Unix, see issue #8876).
        jaraco.path.build({self.source: 'some content'})
        st = os.stat(self.source)
        with mock.patch("os.link", side_effect=OSError(0, "linking unsupported")):
            copy_file(self.source, self.target, link='hard')
        st2 = os.stat(self.source)
        st3 = os.stat(self.target)
        assert os.path.samestat(st, st2), (st, st2)
        assert not os.path.samestat(st2, st3), (st2, st3)
        for fn in (self.source, self.target):
            assert fn.read_text(encoding='utf-8') == 'some content'
@@ -0,0 +1,336 @@
|
||||
"""Tests for distutils.filelist."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from distutils import debug, filelist
|
||||
from distutils.errors import DistutilsTemplateError
|
||||
from distutils.filelist import FileList, glob_to_re, translate_pattern
|
||||
|
||||
import jaraco.path
|
||||
import pytest
|
||||
|
||||
from .compat import py38 as os_helper
|
||||
|
||||
MANIFEST_IN = """\
|
||||
include ok
|
||||
include xo
|
||||
exclude xo
|
||||
include foo.tmp
|
||||
include buildout.cfg
|
||||
global-include *.x
|
||||
global-include *.txt
|
||||
global-exclude *.tmp
|
||||
recursive-include f *.oo
|
||||
recursive-exclude global *.x
|
||||
graft dir
|
||||
prune dir3
|
||||
"""
|
||||
|
||||
|
||||
def make_local_path(s):
|
||||
"""Converts '/' in a string to os.sep"""
|
||||
return s.replace('/', os.sep)
|
||||
|
||||
|
||||
class TestFileList:
|
||||
def assertNoWarnings(self, caplog):
|
||||
warnings = [rec for rec in caplog.records if rec.levelno == logging.WARNING]
|
||||
assert not warnings
|
||||
caplog.clear()
|
||||
|
||||
def assertWarnings(self, caplog):
|
||||
warnings = [rec for rec in caplog.records if rec.levelno == logging.WARNING]
|
||||
assert warnings
|
||||
caplog.clear()
|
||||
|
||||
def test_glob_to_re(self):
|
||||
sep = os.sep
|
||||
if os.sep == '\\':
|
||||
sep = re.escape(os.sep)
|
||||
|
||||
for glob, regex in (
|
||||
# simple cases
|
||||
('foo*', r'(?s:foo[^%(sep)s]*)\Z'),
|
||||
('foo?', r'(?s:foo[^%(sep)s])\Z'),
|
||||
('foo??', r'(?s:foo[^%(sep)s][^%(sep)s])\Z'),
|
||||
# special cases
|
||||
(r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'),
|
||||
(r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'),
|
||||
('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'),
|
||||
(r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z'),
|
||||
):
|
||||
regex = regex % {'sep': sep}
|
||||
assert glob_to_re(glob) == regex
|
||||
|
||||
def test_process_template_line(self):
|
||||
# testing all MANIFEST.in template patterns
|
||||
file_list = FileList()
|
||||
mlp = make_local_path
|
||||
|
||||
# simulated file list
|
||||
file_list.allfiles = [
|
||||
'foo.tmp',
|
||||
'ok',
|
||||
'xo',
|
||||
'four.txt',
|
||||
'buildout.cfg',
|
||||
# filelist does not filter out VCS directories,
|
||||
# it's sdist that does
|
||||
mlp('.hg/last-message.txt'),
|
||||
mlp('global/one.txt'),
|
||||
mlp('global/two.txt'),
|
||||
mlp('global/files.x'),
|
||||
mlp('global/here.tmp'),
|
||||
mlp('f/o/f.oo'),
|
||||
mlp('dir/graft-one'),
|
||||
mlp('dir/dir2/graft2'),
|
||||
mlp('dir3/ok'),
|
||||
mlp('dir3/sub/ok.txt'),
|
||||
]
|
||||
|
||||
for line in MANIFEST_IN.split('\n'):
|
||||
if line.strip() == '':
|
||||
continue
|
||||
file_list.process_template_line(line)
|
||||
|
||||
wanted = [
|
||||
'ok',
|
||||
'buildout.cfg',
|
||||
'four.txt',
|
||||
mlp('.hg/last-message.txt'),
|
||||
mlp('global/one.txt'),
|
||||
mlp('global/two.txt'),
|
||||
mlp('f/o/f.oo'),
|
||||
mlp('dir/graft-one'),
|
||||
mlp('dir/dir2/graft2'),
|
||||
]
|
||||
|
||||
assert file_list.files == wanted
|
||||
|
||||
def test_debug_print(self, capsys, monkeypatch):
|
||||
file_list = FileList()
|
||||
file_list.debug_print('xxx')
|
||||
assert capsys.readouterr().out == ''
|
||||
|
||||
monkeypatch.setattr(debug, 'DEBUG', True)
|
||||
file_list.debug_print('xxx')
|
||||
assert capsys.readouterr().out == 'xxx\n'
|
||||
|
||||
def test_set_allfiles(self):
|
||||
file_list = FileList()
|
||||
files = ['a', 'b', 'c']
|
||||
file_list.set_allfiles(files)
|
||||
assert file_list.allfiles == files
|
||||
|
||||
def test_remove_duplicates(self):
|
||||
file_list = FileList()
|
||||
file_list.files = ['a', 'b', 'a', 'g', 'c', 'g']
|
||||
# files must be sorted beforehand (sdist does it)
|
||||
file_list.sort()
|
||||
file_list.remove_duplicates()
|
||||
assert file_list.files == ['a', 'b', 'c', 'g']
|
||||
|
||||
def test_translate_pattern(self):
|
||||
# not regex
|
||||
assert hasattr(translate_pattern('a', anchor=True, is_regex=False), 'search')
|
||||
|
||||
# is a regex
|
||||
regex = re.compile('a')
|
||||
assert translate_pattern(regex, anchor=True, is_regex=True) == regex
|
||||
|
||||
# plain string flagged as regex
|
||||
assert hasattr(translate_pattern('a', anchor=True, is_regex=True), 'search')
|
||||
|
||||
# glob support
|
||||
assert translate_pattern('*.py', anchor=True, is_regex=False).search(
|
||||
'filelist.py'
|
||||
)
|
||||
|
||||
def test_exclude_pattern(self):
|
||||
# return False if no match
|
||||
file_list = FileList()
|
||||
assert not file_list.exclude_pattern('*.py')
|
||||
|
||||
# return True if files match
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', 'b.py']
|
||||
assert file_list.exclude_pattern('*.py')
|
||||
|
||||
# test excludes
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', 'a.txt']
|
||||
file_list.exclude_pattern('*.py')
|
||||
assert file_list.files == ['a.txt']
|
||||
|
||||
def test_include_pattern(self):
|
||||
# return False if no match
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles([])
|
||||
assert not file_list.include_pattern('*.py')
|
||||
|
||||
# return True if files match
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles(['a.py', 'b.txt'])
|
||||
assert file_list.include_pattern('*.py')
|
||||
|
||||
# test * matches all files
|
||||
file_list = FileList()
|
||||
assert file_list.allfiles is None
|
||||
file_list.set_allfiles(['a.py', 'b.txt'])
|
||||
file_list.include_pattern('*')
|
||||
assert file_list.allfiles == ['a.py', 'b.txt']
|
||||
|
||||
def test_process_template(self, caplog):
|
||||
mlp = make_local_path
|
||||
# invalid lines
|
||||
file_list = FileList()
|
||||
for action in (
|
||||
'include',
|
||||
'exclude',
|
||||
'global-include',
|
||||
'global-exclude',
|
||||
'recursive-include',
|
||||
'recursive-exclude',
|
||||
'graft',
|
||||
'prune',
|
||||
'blarg',
|
||||
):
|
||||
with pytest.raises(DistutilsTemplateError):
|
||||
file_list.process_template_line(action)
|
||||
|
||||
# include
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])
|
||||
|
||||
file_list.process_template_line('include *.py')
|
||||
assert file_list.files == ['a.py']
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('include *.rb')
|
||||
assert file_list.files == ['a.py']
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# exclude
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]
|
||||
|
||||
file_list.process_template_line('exclude *.py')
|
||||
assert file_list.files == ['b.txt', mlp('d/c.py')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('exclude *.rb')
|
||||
assert file_list.files == ['b.txt', mlp('d/c.py')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# global-include
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')])
|
||||
|
||||
file_list.process_template_line('global-include *.py')
|
||||
assert file_list.files == ['a.py', mlp('d/c.py')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('global-include *.rb')
|
||||
assert file_list.files == ['a.py', mlp('d/c.py')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# global-exclude
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', 'b.txt', mlp('d/c.py')]
|
||||
|
||||
file_list.process_template_line('global-exclude *.py')
|
||||
assert file_list.files == ['b.txt']
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('global-exclude *.rb')
|
||||
assert file_list.files == ['b.txt']
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# recursive-include
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')])
|
||||
|
||||
file_list.process_template_line('recursive-include d *.py')
|
||||
assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('recursive-include e *.py')
|
||||
assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# recursive-exclude
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')]
|
||||
|
||||
file_list.process_template_line('recursive-exclude d *.py')
|
||||
assert file_list.files == ['a.py', mlp('d/c.txt')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('recursive-exclude e *.py')
|
||||
assert file_list.files == ['a.py', mlp('d/c.txt')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# graft
|
||||
file_list = FileList()
|
||||
file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')])
|
||||
|
||||
file_list.process_template_line('graft d')
|
||||
assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('graft e')
|
||||
assert file_list.files == [mlp('d/b.py'), mlp('d/d/e.py')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
# prune
|
||||
file_list = FileList()
|
||||
file_list.files = ['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')]
|
||||
|
||||
file_list.process_template_line('prune d')
|
||||
assert file_list.files == ['a.py', mlp('f/f.py')]
|
||||
self.assertNoWarnings(caplog)
|
||||
|
||||
file_list.process_template_line('prune e')
|
||||
assert file_list.files == ['a.py', mlp('f/f.py')]
|
||||
self.assertWarnings(caplog)
|
||||
|
||||
|
||||
class TestFindAll:
|
||||
@os_helper.skip_unless_symlink
|
||||
def test_missing_symlink(self, temp_cwd):
|
||||
os.symlink('foo', 'bar')
|
||||
assert filelist.findall() == []
|
||||
|
||||
def test_basic_discovery(self, temp_cwd):
|
||||
"""
|
||||
When findall is called with no parameters or with
|
||||
'.' as the parameter, the dot should be omitted from
|
||||
the results.
|
||||
"""
|
||||
jaraco.path.build({'foo': {'file1.txt': ''}, 'bar': {'file2.txt': ''}})
|
||||
file1 = os.path.join('foo', 'file1.txt')
|
||||
file2 = os.path.join('bar', 'file2.txt')
|
||||
expected = [file2, file1]
|
||||
assert sorted(filelist.findall()) == expected
|
||||
|
||||
def test_non_local_discovery(self, tmp_path):
|
||||
"""
|
||||
When findall is called with another path, the full
|
||||
path name should be returned.
|
||||
"""
|
||||
jaraco.path.build({'file1.txt': ''}, tmp_path)
|
||||
expected = [str(tmp_path / 'file1.txt')]
|
||||
assert filelist.findall(tmp_path) == expected
|
||||
|
||||
@os_helper.skip_unless_symlink
|
||||
def test_symlink_loop(self, tmp_path):
|
||||
jaraco.path.build(
|
||||
{
|
||||
'link-to-parent': jaraco.path.Symlink('.'),
|
||||
'somefile': '',
|
||||
},
|
||||
tmp_path,
|
||||
)
|
||||
files = filelist.findall(tmp_path)
|
||||
assert len(files) == 1
|
||||
@@ -0,0 +1,245 @@
|
||||
"""Tests for distutils.command.install."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import site
|
||||
import sys
|
||||
from distutils import sysconfig
|
||||
from distutils.command import install as install_module
|
||||
from distutils.command.build_ext import build_ext
|
||||
from distutils.command.install import INSTALL_SCHEMES, install
|
||||
from distutils.core import Distribution
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.extension import Extension
|
||||
from distutils.tests import missing_compiler_executable, support
|
||||
from distutils.util import is_mingw
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _make_ext_name(modname):
|
||||
return modname + sysconfig.get_config_var('EXT_SUFFIX')
|
||||
|
||||
|
||||
@support.combine_markers
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
class TestInstall(
|
||||
support.TempdirManager,
|
||||
):
|
||||
@pytest.mark.xfail(
|
||||
'platform.system() == "Windows" and sys.version_info > (3, 11)',
|
||||
reason="pypa/distutils#148",
|
||||
)
|
||||
def test_home_installation_scheme(self):
|
||||
# This ensures two things:
# - that --home generates the desired set of directory names
# - that --home is supported on all platforms
|
||||
builddir = self.mkdtemp()
|
||||
destination = os.path.join(builddir, "installation")
|
||||
|
||||
dist = Distribution({"name": "foopkg"})
|
||||
# script_name need not exist, it just needs to be initialized
|
||||
dist.script_name = os.path.join(builddir, "setup.py")
|
||||
dist.command_obj["build"] = support.DummyCommand(
|
||||
build_base=builddir,
|
||||
build_lib=os.path.join(builddir, "lib"),
|
||||
)
|
||||
|
||||
cmd = install(dist)
|
||||
cmd.home = destination
|
||||
cmd.ensure_finalized()
|
||||
|
||||
assert cmd.install_base == destination
|
||||
assert cmd.install_platbase == destination
|
||||
|
||||
def check_path(got, expected):
|
||||
got = os.path.normpath(got)
|
||||
expected = os.path.normpath(expected)
|
||||
assert got == expected
|
||||
|
||||
impl_name = sys.implementation.name.replace("cpython", "python")
|
||||
libdir = os.path.join(destination, "lib", impl_name)
|
||||
check_path(cmd.install_lib, libdir)
|
||||
_platlibdir = getattr(sys, "platlibdir", "lib")
|
||||
platlibdir = os.path.join(destination, _platlibdir, impl_name)
|
||||
check_path(cmd.install_platlib, platlibdir)
|
||||
check_path(cmd.install_purelib, libdir)
|
||||
check_path(
|
||||
cmd.install_headers,
|
||||
os.path.join(destination, "include", impl_name, "foopkg"),
|
||||
)
|
||||
check_path(cmd.install_scripts, os.path.join(destination, "bin"))
|
||||
check_path(cmd.install_data, destination)
|
||||
|
||||
def test_user_site(self, monkeypatch):
|
||||
# test install with --user
|
||||
# preparing the environment for the test
|
||||
self.tmpdir = self.mkdtemp()
|
||||
orig_site = site.USER_SITE
|
||||
orig_base = site.USER_BASE
|
||||
monkeypatch.setattr(site, 'USER_BASE', os.path.join(self.tmpdir, 'B'))
|
||||
monkeypatch.setattr(site, 'USER_SITE', os.path.join(self.tmpdir, 'S'))
|
||||
monkeypatch.setattr(install_module, 'USER_BASE', site.USER_BASE)
|
||||
monkeypatch.setattr(install_module, 'USER_SITE', site.USER_SITE)
|
||||
|
||||
def _expanduser(path):
|
||||
if path.startswith('~'):
|
||||
return os.path.normpath(self.tmpdir + path[1:])
|
||||
return path
|
||||
|
||||
monkeypatch.setattr(os.path, 'expanduser', _expanduser)
|
||||
|
||||
for key in ('nt_user', 'posix_user'):
|
||||
assert key in INSTALL_SCHEMES
|
||||
|
||||
dist = Distribution({'name': 'xx'})
|
||||
cmd = install(dist)
|
||||
|
||||
# making sure the user option is there
|
||||
options = [name for name, short, label in cmd.user_options]
|
||||
assert 'user' in options
|
||||
|
||||
# setting a value
|
||||
cmd.user = True
|
||||
|
||||
# user base and site shouldn't be created yet
|
||||
assert not os.path.exists(site.USER_BASE)
|
||||
assert not os.path.exists(site.USER_SITE)
|
||||
|
||||
# let's run finalize
|
||||
cmd.ensure_finalized()
|
||||
|
||||
# now they should
|
||||
assert os.path.exists(site.USER_BASE)
|
||||
assert os.path.exists(site.USER_SITE)
|
||||
|
||||
assert 'userbase' in cmd.config_vars
|
||||
assert 'usersite' in cmd.config_vars
|
||||
|
||||
actual_headers = os.path.relpath(cmd.install_headers, site.USER_BASE)
|
||||
if os.name == 'nt' and not is_mingw():
|
||||
site_path = os.path.relpath(os.path.dirname(orig_site), orig_base)
|
||||
include = os.path.join(site_path, 'Include')
|
||||
else:
|
||||
include = sysconfig.get_python_inc(0, '')
|
||||
expect_headers = os.path.join(include, 'xx')
|
||||
|
||||
assert os.path.normcase(actual_headers) == os.path.normcase(expect_headers)
|
||||
|
||||
def test_handle_extra_path(self):
|
||||
dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
|
||||
cmd = install(dist)
|
||||
|
||||
# two elements
|
||||
cmd.handle_extra_path()
|
||||
assert cmd.extra_path == ['path', 'dirs']
|
||||
assert cmd.extra_dirs == 'dirs'
|
||||
assert cmd.path_file == 'path'
|
||||
|
||||
# one element
|
||||
cmd.extra_path = ['path']
|
||||
cmd.handle_extra_path()
|
||||
assert cmd.extra_path == ['path']
|
||||
assert cmd.extra_dirs == 'path'
|
||||
assert cmd.path_file == 'path'
|
||||
|
||||
# none
|
||||
dist.extra_path = cmd.extra_path = None
|
||||
cmd.handle_extra_path()
|
||||
assert cmd.extra_path is None
|
||||
assert cmd.extra_dirs == ''
|
||||
assert cmd.path_file is None
|
||||
|
||||
# three elements (no way !)
|
||||
cmd.extra_path = 'path,dirs,again'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.handle_extra_path()
|
||||
|
||||
def test_finalize_options(self):
|
||||
dist = Distribution({'name': 'xx'})
|
||||
cmd = install(dist)
|
||||
|
||||
# must supply either prefix/exec-prefix/home or
|
||||
# install-base/install-platbase -- not both
|
||||
cmd.prefix = 'prefix'
|
||||
cmd.install_base = 'base'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.finalize_options()
|
||||
|
||||
# must supply either home or prefix/exec-prefix -- not both
|
||||
cmd.install_base = None
|
||||
cmd.home = 'home'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.finalize_options()
|
||||
|
||||
# can't combine user with prefix/exec_prefix/home or
|
||||
# install_(plat)base
|
||||
cmd.prefix = None
|
||||
cmd.user = 'user'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.finalize_options()
|
||||
|
||||
def test_record(self):
|
||||
install_dir = self.mkdtemp()
|
||||
project_dir, dist = self.create_dist(py_modules=['hello'], scripts=['sayhi'])
|
||||
os.chdir(project_dir)
|
||||
self.write_file('hello.py', "def main(): print('o hai')")
|
||||
self.write_file('sayhi', 'from hello import main; main()')
|
||||
|
||||
cmd = install(dist)
|
||||
dist.command_obj['install'] = cmd
|
||||
cmd.root = install_dir
|
||||
cmd.record = os.path.join(project_dir, 'filelist')
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
content = pathlib.Path(cmd.record).read_text(encoding='utf-8')
|
||||
|
||||
found = [pathlib.Path(line).name for line in content.splitlines()]
|
||||
expected = [
|
||||
'hello.py',
|
||||
f'hello.{sys.implementation.cache_tag}.pyc',
|
||||
'sayhi',
|
||||
'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
|
||||
]
|
||||
assert found == expected
|
||||
|
||||
def test_record_extensions(self):
|
||||
cmd = missing_compiler_executable()
|
||||
if cmd is not None:
|
||||
pytest.skip(f'The {cmd!r} command is not found')
|
||||
install_dir = self.mkdtemp()
|
||||
project_dir, dist = self.create_dist(
|
||||
ext_modules=[Extension('xx', ['xxmodule.c'])]
|
||||
)
|
||||
os.chdir(project_dir)
|
||||
support.copy_xxmodule_c(project_dir)
|
||||
|
||||
buildextcmd = build_ext(dist)
|
||||
support.fixup_build_ext(buildextcmd)
|
||||
buildextcmd.ensure_finalized()
|
||||
|
||||
cmd = install(dist)
|
||||
dist.command_obj['install'] = cmd
|
||||
dist.command_obj['build_ext'] = buildextcmd
|
||||
cmd.root = install_dir
|
||||
cmd.record = os.path.join(project_dir, 'filelist')
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
content = pathlib.Path(cmd.record).read_text(encoding='utf-8')
|
||||
|
||||
found = [pathlib.Path(line).name for line in content.splitlines()]
|
||||
expected = [
|
||||
_make_ext_name('xx'),
|
||||
'UNKNOWN-0.0.0-py{}.{}.egg-info'.format(*sys.version_info[:2]),
|
||||
]
|
||||
assert found == expected
|
||||
|
||||
def test_debug_mode(self, caplog, monkeypatch):
|
||||
# this covers the code called when DEBUG is set
|
||||
monkeypatch.setattr(install_module, 'DEBUG', True)
|
||||
caplog.set_level(logging.DEBUG)
|
||||
self.test_record()
|
||||
assert any(rec for rec in caplog.records if rec.levelno == logging.DEBUG)
|
||||
@@ -0,0 +1,74 @@
"""Tests for distutils.command.install_data."""

import os
import pathlib
from distutils.command.install_data import install_data
from distutils.tests import support

import pytest


@pytest.mark.usefixtures('save_env')
class TestInstallData(
    support.TempdirManager,
):
    def test_simple_run(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_data(dist)
        cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')

        # data_files can contain
        # - simple files
        # - a Path object
        # - a tuple with a path, and a list of files
        one = os.path.join(pkg_dir, 'one')
        self.write_file(one, 'xxx')
        inst2 = os.path.join(pkg_dir, 'inst2')
        two = os.path.join(pkg_dir, 'two')
        self.write_file(two, 'xxx')
        three = pathlib.Path(pkg_dir) / 'three'
        self.write_file(three, 'xxx')

        cmd.data_files = [one, (inst2, [two]), three]
        assert cmd.get_inputs() == [one, (inst2, [two]), three]

        # let's run the command
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 3
        rthree = os.path.split(one)[-1]
        assert os.path.exists(os.path.join(inst, rthree))
        rtwo = os.path.split(two)[-1]
        assert os.path.exists(os.path.join(inst2, rtwo))
        rone = os.path.split(one)[-1]
        assert os.path.exists(os.path.join(inst, rone))
        cmd.outfiles = []

        # let's try with warn_dir on
        cmd.warn_dir = True
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 3
        assert os.path.exists(os.path.join(inst, rthree))
        assert os.path.exists(os.path.join(inst2, rtwo))
        assert os.path.exists(os.path.join(inst, rone))
        cmd.outfiles = []

        # now using root and empty dir
        cmd.root = os.path.join(pkg_dir, 'root')
        inst5 = os.path.join(pkg_dir, 'inst5')
        four = os.path.join(cmd.install_dir, 'four')
        self.write_file(four, 'xx')
        cmd.data_files = [one, (inst2, [two]), three, ('inst5', [four]), (inst5, [])]
        cmd.ensure_finalized()
        cmd.run()

        # let's check the result
        assert len(cmd.get_outputs()) == 5
        assert os.path.exists(os.path.join(inst, rthree))
        assert os.path.exists(os.path.join(inst2, rtwo))
        assert os.path.exists(os.path.join(inst, rone))
@@ -0,0 +1,33 @@
"""Tests for distutils.command.install_headers."""

import os
from distutils.command.install_headers import install_headers
from distutils.tests import support

import pytest


@pytest.mark.usefixtures('save_env')
class TestInstallHeaders(
    support.TempdirManager,
):
    def test_simple_run(self):
        # we have two headers
        header_list = self.mkdtemp()
        header1 = os.path.join(header_list, 'header1')
        header2 = os.path.join(header_list, 'header2')
        self.write_file(header1)
        self.write_file(header2)
        headers = [header1, header2]

        pkg_dir, dist = self.create_dist(headers=headers)
        cmd = install_headers(dist)
        assert cmd.get_inputs() == headers

        # let's run the command
        cmd.install_dir = os.path.join(pkg_dir, 'inst')
        cmd.ensure_finalized()
        cmd.run()

        # let's check the results
        assert len(cmd.get_outputs()) == 2
@@ -0,0 +1,110 @@
"""Tests for distutils.command.install_lib."""

import importlib.util
import os
import sys
from distutils.command.install_lib import install_lib
from distutils.errors import DistutilsOptionError
from distutils.extension import Extension
from distutils.tests import support

import pytest


@support.combine_markers
@pytest.mark.usefixtures('save_env')
class TestInstallLib(
    support.TempdirManager,
):
    def test_finalize_options(self):
        dist = self.create_dist()[1]
        cmd = install_lib(dist)

        cmd.finalize_options()
        assert cmd.compile == 1
        assert cmd.optimize == 0

        # optimize must be 0, 1, or 2
        cmd.optimize = 'foo'
        with pytest.raises(DistutilsOptionError):
            cmd.finalize_options()
        cmd.optimize = '4'
        with pytest.raises(DistutilsOptionError):
            cmd.finalize_options()

        cmd.optimize = '2'
        cmd.finalize_options()
        assert cmd.optimize == 2

    @pytest.mark.skipif('sys.dont_write_bytecode')
    def test_byte_compile(self):
        project_dir, dist = self.create_dist()
        os.chdir(project_dir)
        cmd = install_lib(dist)
        cmd.compile = cmd.optimize = 1

        f = os.path.join(project_dir, 'foo.py')
        self.write_file(f, '# python file')
        cmd.byte_compile([f])
        pyc_file = importlib.util.cache_from_source('foo.py', optimization='')
        pyc_opt_file = importlib.util.cache_from_source(
            'foo.py', optimization=cmd.optimize
        )
        assert os.path.exists(pyc_file)
        assert os.path.exists(pyc_opt_file)

    def test_get_outputs(self):
        project_dir, dist = self.create_dist()
        os.chdir(project_dir)
        os.mkdir('spam')
        cmd = install_lib(dist)

        # setting up a dist environment
        cmd.compile = cmd.optimize = 1
        cmd.install_dir = self.mkdtemp()
        f = os.path.join(project_dir, 'spam', '__init__.py')
        self.write_file(f, '# python package')
        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
        cmd.distribution.packages = ['spam']
        cmd.distribution.script_name = 'setup.py'

        # get_outputs should return 4 elements: spam/__init__.py and .pyc,
        # foo.import-tag-abiflags.so / foo.pyd
        outputs = cmd.get_outputs()
        assert len(outputs) == 4, outputs

    def test_get_inputs(self):
        project_dir, dist = self.create_dist()
        os.chdir(project_dir)
        os.mkdir('spam')
        cmd = install_lib(dist)

        # setting up a dist environment
        cmd.compile = cmd.optimize = 1
        cmd.install_dir = self.mkdtemp()
        f = os.path.join(project_dir, 'spam', '__init__.py')
        self.write_file(f, '# python package')
        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
        cmd.distribution.packages = ['spam']
        cmd.distribution.script_name = 'setup.py'

        # get_inputs should return 2 elements: spam/__init__.py and
        # foo.import-tag-abiflags.so / foo.pyd
        inputs = cmd.get_inputs()
        assert len(inputs) == 2, inputs

    def test_dont_write_bytecode(self, caplog):
        # makes sure byte_compile is not used
        dist = self.create_dist()[1]
        cmd = install_lib(dist)
        cmd.compile = True
        cmd.optimize = 1

        old_dont_write_bytecode = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            sys.dont_write_bytecode = old_dont_write_bytecode

        assert 'byte-compiling is disabled' in caplog.messages[0]
@@ -0,0 +1,52 @@
"""Tests for distutils.command.install_scripts."""

import os
from distutils.command.install_scripts import install_scripts
from distutils.core import Distribution
from distutils.tests import support

from . import test_build_scripts


class TestInstallScripts(support.TempdirManager):
    def test_default_settings(self):
        dist = Distribution()
        dist.command_obj["build"] = support.DummyCommand(build_scripts="/foo/bar")
        dist.command_obj["install"] = support.DummyCommand(
            install_scripts="/splat/funk",
            force=True,
            skip_build=True,
        )
        cmd = install_scripts(dist)
        assert not cmd.force
        assert not cmd.skip_build
        assert cmd.build_dir is None
        assert cmd.install_dir is None

        cmd.finalize_options()

        assert cmd.force
        assert cmd.skip_build
        assert cmd.build_dir == "/foo/bar"
        assert cmd.install_dir == "/splat/funk"

    def test_installation(self):
        source = self.mkdtemp()

        expected = test_build_scripts.TestBuildScripts.write_sample_scripts(source)

        target = self.mkdtemp()
        dist = Distribution()
        dist.command_obj["build"] = support.DummyCommand(build_scripts=source)
        dist.command_obj["install"] = support.DummyCommand(
            install_scripts=target,
            force=True,
            skip_build=True,
        )
        cmd = install_scripts(dist)
        cmd.finalize_options()
        cmd.run()

        installed = os.listdir(target)
        for name in expected:
            assert name in installed
@@ -0,0 +1,12 @@
"""Tests for distutils.log"""

import logging
from distutils._log import log


class TestLog:
    def test_non_ascii(self, caplog):
        caplog.set_level(logging.DEBUG)
        log.debug('Dεbug\tMėssãge')
        log.fatal('Fαtal\tÈrrōr')
        assert caplog.messages == ['Dεbug\tMėssãge', 'Fαtal\tÈrrōr']
@@ -0,0 +1,56 @@
from distutils import sysconfig
from distutils.errors import CCompilerError, DistutilsPlatformError
from distutils.util import is_mingw, split_quoted

import pytest


class TestMingw32CCompiler:
    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
    def test_compiler_type(self):
        from distutils.cygwinccompiler import Mingw32CCompiler

        compiler = Mingw32CCompiler()
        assert compiler.compiler_type == 'mingw32'

    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
    def test_set_executables(self, monkeypatch):
        from distutils.cygwinccompiler import Mingw32CCompiler

        monkeypatch.setenv('CC', 'cc')
        monkeypatch.setenv('CXX', 'c++')

        compiler = Mingw32CCompiler()

        assert compiler.compiler == split_quoted('cc -O -Wall')
        assert compiler.compiler_so == split_quoted('cc -shared -O -Wall')
        assert compiler.compiler_cxx == split_quoted('c++ -O -Wall')
        assert compiler.linker_exe == split_quoted('cc')
        assert compiler.linker_so == split_quoted('cc -shared')

    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
    def test_runtime_library_dir_option(self):
        from distutils.cygwinccompiler import Mingw32CCompiler

        compiler = Mingw32CCompiler()
        with pytest.raises(DistutilsPlatformError):
            compiler.runtime_library_dir_option('/usr/lib')

    @pytest.mark.skipif(not is_mingw(), reason='not on mingw')
    def test_cygwincc_error(self, monkeypatch):
        import distutils.cygwinccompiler

        monkeypatch.setattr(distutils.cygwinccompiler, 'is_cygwincc', lambda _: True)

        with pytest.raises(CCompilerError):
            distutils.cygwinccompiler.Mingw32CCompiler()

    @pytest.mark.skipif('sys.platform == "cygwin"')
    def test_customize_compiler_with_msvc_python(self):
        from distutils.cygwinccompiler import Mingw32CCompiler

        # Even with an MSVC Python build, customize_compiler() should not
        # fail when a Mingw32CCompiler is requested.
        # https://github.com/pypa/setuptools/issues/4456
        compiler = Mingw32CCompiler()
        sysconfig.customize_compiler(compiler)
@@ -0,0 +1,126 @@
"""Tests for distutils._modified."""

import os
import types
from distutils._modified import newer, newer_group, newer_pairwise, newer_pairwise_group
from distutils.errors import DistutilsFileError
from distutils.tests import support

import pytest


class TestDepUtil(support.TempdirManager):
    def test_newer(self):
        tmpdir = self.mkdtemp()
        new_file = os.path.join(tmpdir, 'new')
        old_file = os.path.abspath(__file__)

        # Raise DistutilsFileError if 'new_file' does not exist.
        with pytest.raises(DistutilsFileError):
            newer(new_file, old_file)

        # Return true if 'new_file' exists and is more recently modified than
        # 'old_file', or if 'new_file' exists and 'old_file' doesn't.
        self.write_file(new_file)
        assert newer(new_file, 'I_dont_exist')
        assert newer(new_file, old_file)

        # Return false if both exist and 'old_file' is the same age or younger
        # than 'new_file'.
        assert not newer(old_file, new_file)

    def _setup_1234(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        targets = os.path.join(tmpdir, 'targets')
        os.mkdir(sources)
        os.mkdir(targets)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.abspath(__file__)  # I am the old file
        four = os.path.join(targets, 'four')
        self.write_file(one)
        self.write_file(two)
        self.write_file(four)
        return one, two, three, four

    def test_newer_pairwise(self):
        one, two, three, four = self._setup_1234()

        assert newer_pairwise([one, two], [three, four]) == ([one], [three])

    def test_newer_pairwise_mismatch(self):
        one, two, three, four = self._setup_1234()

        with pytest.raises(ValueError):
            newer_pairwise([one], [three, four])

        with pytest.raises(ValueError):
            newer_pairwise([one, two], [three])

    def test_newer_pairwise_empty(self):
        assert newer_pairwise([], []) == ([], [])

    def test_newer_pairwise_fresh(self):
        one, two, three, four = self._setup_1234()

        assert newer_pairwise([one, three], [two, four]) == ([], [])

    def test_newer_group(self):
        tmpdir = self.mkdtemp()
        sources = os.path.join(tmpdir, 'sources')
        os.mkdir(sources)
        one = os.path.join(sources, 'one')
        two = os.path.join(sources, 'two')
        three = os.path.join(sources, 'three')
        old_file = os.path.abspath(__file__)

        # return true if 'old_file' is out-of-date with respect to any file
        # listed in 'sources'.
        self.write_file(one)
        self.write_file(two)
        self.write_file(three)
        assert newer_group([one, two, three], old_file)
        assert not newer_group([one, two, old_file], three)

        # missing handling
        os.remove(one)
        with pytest.raises(OSError):
            newer_group([one, two, old_file], three)

        assert not newer_group([one, two, old_file], three, missing='ignore')

        assert newer_group([one, two, old_file], three, missing='newer')


@pytest.fixture
def groups_target(tmp_path):
    """
    Set up some older sources, a target, and newer sources.

    Returns a simple namespace with these values.
    """
    filenames = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h']
    paths = [tmp_path / name for name in filenames]

    for mtime, path in enumerate(paths):
        path.write_text('', encoding='utf-8')

        # make sure modification times are sequential
        os.utime(path, (mtime, mtime))

    return types.SimpleNamespace(older=paths[:2], target=paths[2], newer=paths[3:])


def test_newer_pairwise_group(groups_target):
    older = newer_pairwise_group([groups_target.older], [groups_target.target])
    newer = newer_pairwise_group([groups_target.newer], [groups_target.target])
    assert older == ([], [])
    assert newer == ([groups_target.newer], [groups_target.target])


def test_newer_group_no_sources_no_target(tmp_path):
    """
    Consider no sources and no target "newer".
    """
    assert newer_group([], str(tmp_path / 'does-not-exist'))
@@ -0,0 +1,137 @@
|
||||
"""Tests for distutils._msvccompiler."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sysconfig
|
||||
import threading
|
||||
import unittest.mock as mock
|
||||
from distutils import _msvccompiler
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
from distutils.tests import support
|
||||
from distutils.util import get_platform
|
||||
|
||||
import pytest
|
||||
|
||||
needs_winreg = pytest.mark.skipif('not hasattr(_msvccompiler, "winreg")')
|
||||
|
||||
|
||||
class Testmsvccompiler(support.TempdirManager):
|
||||
def test_no_compiler(self, monkeypatch):
|
||||
# makes sure query_vcvarsall raises
|
||||
# a DistutilsPlatformError if the compiler
|
||||
# is not found
|
||||
def _find_vcvarsall(plat_spec):
|
||||
return None, None
|
||||
|
||||
monkeypatch.setattr(_msvccompiler, '_find_vcvarsall', _find_vcvarsall)
|
||||
|
||||
with pytest.raises(DistutilsPlatformError):
|
||||
_msvccompiler._get_vc_env(
|
||||
'wont find this version',
|
||||
)
|
||||
|
||||
@pytest.mark.skipif(
|
||||
not sysconfig.get_platform().startswith("win"),
|
||||
reason="Only run test for non-mingw Windows platforms",
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
"plat_name, expected",
|
||||
[
|
||||
("win-arm64", "win-arm64"),
|
||||
("win-amd64", "win-amd64"),
|
||||
(None, get_platform()),
|
||||
],
|
||||
)
|
||||
def test_cross_platform_compilation_paths(self, monkeypatch, plat_name, expected):
|
||||
"""
|
||||
Ensure a specified target platform is passed to _get_vcvars_spec.
|
||||
"""
|
||||
compiler = _msvccompiler.MSVCCompiler()
|
||||
|
||||
def _get_vcvars_spec(host_platform, platform):
|
||||
assert platform == expected
|
||||
|
||||
monkeypatch.setattr(_msvccompiler, '_get_vcvars_spec', _get_vcvars_spec)
|
||||
compiler.initialize(plat_name)
|
||||
|
||||
@needs_winreg
|
||||
def test_get_vc_env_unicode(self):
|
||||
test_var = 'ṰḖṤṪ┅ṼẨṜ'
|
||||
test_value = '₃⁴₅'
|
||||
|
||||
# Ensure we don't early exit from _get_vc_env
|
||||
old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
|
||||
os.environ[test_var] = test_value
|
||||
try:
|
||||
env = _msvccompiler._get_vc_env('x86')
|
||||
assert test_var.lower() in env
|
||||
assert test_value == env[test_var.lower()]
|
||||
finally:
|
||||
os.environ.pop(test_var)
|
||||
if old_distutils_use_sdk:
|
||||
os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk
|
||||
|
||||
@needs_winreg
|
||||
@pytest.mark.parametrize('ver', (2015, 2017))
|
||||
def test_get_vc(self, ver):
|
||||
# This function cannot be mocked, so pass if VC is found
|
||||
# and skip otherwise.
|
||||
lookup = getattr(_msvccompiler, f'_find_vc{ver}')
|
||||
expected_version = {2015: 14, 2017: 15}[ver]
|
||||
version, path = lookup()
|
||||
if not version:
|
||||
pytest.skip(f"VS {ver} is not installed")
|
||||
assert version >= expected_version
|
||||
assert os.path.isdir(path)
|
||||
|
||||
|
||||
class CheckThread(threading.Thread):
|
||||
exc_info = None
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
super().run()
|
||||
except Exception:
|
||||
self.exc_info = sys.exc_info()
|
||||
|
||||
def __bool__(self):
|
||||
return not self.exc_info
|
||||
|
||||
|
||||
class TestSpawn:
|
||||
def test_concurrent_safe(self):
|
||||
"""
|
||||
Concurrent calls to spawn should have consistent results.
|
||||
"""
|
||||
compiler = _msvccompiler.MSVCCompiler()
|
||||
compiler._paths = "expected"
|
||||
inner_cmd = 'import os; assert os.environ["PATH"] == "expected"'
|
||||
command = [sys.executable, '-c', inner_cmd]
|
||||
|
||||
threads = [
|
||||
CheckThread(target=compiler.spawn, args=[command]) for n in range(100)
|
||||
]
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
assert all(threads)
|
||||
|
||||
def test_concurrent_safe_fallback(self):
|
||||
"""
|
||||
If CCompiler.spawn has been monkey-patched without support
|
||||
for an env, it should still execute.
|
||||
"""
|
||||
from distutils import ccompiler
|
||||
|
||||
compiler = _msvccompiler.MSVCCompiler()
|
||||
compiler._paths = "expected"
|
||||
|
||||
def CCompiler_spawn(self, cmd):
|
||||
"A spawn without an env argument."
|
||||
assert os.environ["PATH"] == "expected"
|
||||
|
||||
with mock.patch.object(ccompiler.CCompiler, 'spawn', CCompiler_spawn):
|
||||
compiler.spawn(["n/a"])
|
||||
|
||||
assert os.environ.get("PATH") != "expected"
|
||||
@@ -0,0 +1,470 @@
|
||||
"""Tests for distutils.command.sdist."""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import shutil # noqa: F401
|
||||
import tarfile
|
||||
import zipfile
|
||||
from distutils.archive_util import ARCHIVE_FORMATS
|
||||
from distutils.command.sdist import sdist, show_formats
|
||||
from distutils.core import Distribution
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.filelist import FileList
|
||||
from os.path import join
|
||||
from textwrap import dedent
|
||||
|
||||
import jaraco.path
|
||||
import path
|
||||
import pytest
|
||||
from more_itertools import ilen
|
||||
|
||||
from . import support
|
||||
from .unix_compat import grp, pwd, require_uid_0, require_unix_id
|
||||
|
||||
SETUP_PY = """
|
||||
from distutils.core import setup
|
||||
import somecode
|
||||
|
||||
setup(name='fake')
|
||||
"""
|
||||
|
||||
MANIFEST = """\
|
||||
# file GENERATED by distutils, do NOT edit
|
||||
README
|
||||
buildout.cfg
|
||||
inroot.txt
|
||||
setup.py
|
||||
data%(sep)sdata.dt
|
||||
scripts%(sep)sscript.py
|
||||
some%(sep)sfile.txt
|
||||
some%(sep)sother_file.txt
|
||||
somecode%(sep)s__init__.py
|
||||
somecode%(sep)sdoc.dat
|
||||
somecode%(sep)sdoc.txt
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def project_dir(request, distutils_managed_tempdir):
|
||||
self = request.instance
|
||||
self.tmp_dir = self.mkdtemp()
|
||||
jaraco.path.build(
|
||||
{
|
||||
'somecode': {
|
||||
'__init__.py': '#',
|
||||
},
|
||||
'README': 'xxx',
|
||||
'setup.py': SETUP_PY,
|
||||
},
|
||||
self.tmp_dir,
|
||||
)
|
||||
with path.Path(self.tmp_dir):
|
||||
yield
|
||||
|
||||
|
||||
def clean_lines(filepath):
|
||||
with pathlib.Path(filepath).open(encoding='utf-8') as f:
|
||||
yield from filter(None, map(str.strip, f))
|
||||
|
||||
|
||||
class TestSDist(support.TempdirManager):
|
||||
def get_cmd(self, metadata=None):
|
||||
"""Returns a cmd"""
|
||||
if metadata is None:
|
||||
metadata = {
|
||||
'name': 'ns.fake--pkg',
|
||||
'version': '1.0',
|
||||
'url': 'xxx',
|
||||
'author': 'xxx',
|
||||
'author_email': 'xxx',
|
||||
}
|
||||
dist = Distribution(metadata)
|
||||
dist.script_name = 'setup.py'
|
||||
dist.packages = ['somecode']
|
||||
dist.include_package_data = True
|
||||
cmd = sdist(dist)
|
||||
cmd.dist_dir = 'dist'
|
||||
return dist, cmd
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_prune_file_list(self):
|
||||
# this test creates a project with some VCS dirs and an NFS rename
|
||||
# file, then launches sdist to check they get pruned on all systems
|
||||
|
||||
# creating VCS directories with some files in them
|
||||
os.mkdir(join(self.tmp_dir, 'somecode', '.svn'))
|
||||
self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx')
|
||||
|
||||
os.mkdir(join(self.tmp_dir, 'somecode', '.hg'))
|
||||
self.write_file((self.tmp_dir, 'somecode', '.hg', 'ok'), 'xxx')
|
||||
|
||||
os.mkdir(join(self.tmp_dir, 'somecode', '.git'))
|
||||
self.write_file((self.tmp_dir, 'somecode', '.git', 'ok'), 'xxx')
|
||||
|
||||
self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx')
|
||||
|
||||
# now building a sdist
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# zip is available universally
|
||||
# (tar might not be installed under win32)
|
||||
cmd.formats = ['zip']
|
||||
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# now let's check what we have
|
||||
dist_folder = join(self.tmp_dir, 'dist')
|
||||
files = os.listdir(dist_folder)
|
||||
assert files == ['ns_fake_pkg-1.0.zip']
|
||||
|
||||
zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
|
||||
try:
|
||||
content = zip_file.namelist()
|
||||
finally:
|
||||
zip_file.close()
|
||||
|
||||
# making sure everything has been pruned correctly
|
||||
expected = [
|
||||
'',
|
||||
'PKG-INFO',
|
||||
'README',
|
||||
'setup.py',
|
||||
'somecode/',
|
||||
'somecode/__init__.py',
|
||||
]
|
||||
assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
@pytest.mark.skipif("not shutil.which('tar')")
|
||||
@pytest.mark.skipif("not shutil.which('gzip')")
|
||||
def test_make_distribution(self):
|
||||
# now building a sdist
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# creating a gztar then a tar
|
||||
cmd.formats = ['gztar', 'tar']
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# making sure we have two files
|
||||
dist_folder = join(self.tmp_dir, 'dist')
|
||||
result = os.listdir(dist_folder)
|
||||
result.sort()
|
||||
assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
|
||||
|
||||
os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar'))
|
||||
os.remove(join(dist_folder, 'ns_fake_pkg-1.0.tar.gz'))
|
||||
|
||||
# now trying a tar then a gztar
|
||||
cmd.formats = ['tar', 'gztar']
|
||||
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
result = os.listdir(dist_folder)
|
||||
result.sort()
|
||||
assert result == ['ns_fake_pkg-1.0.tar', 'ns_fake_pkg-1.0.tar.gz']
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_add_defaults(self):
|
||||
# https://bugs.python.org/issue2279
|
||||
|
||||
# add_defaults should also include
|
||||
# data_files and package_data
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# filling data_files by pointing files
|
||||
# in package_data
|
||||
dist.package_data = {'': ['*.cfg', '*.dat'], 'somecode': ['*.txt']}
|
||||
self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
|
||||
self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#')
|
||||
|
||||
# adding some data in data_files
|
||||
data_dir = join(self.tmp_dir, 'data')
|
||||
os.mkdir(data_dir)
|
||||
self.write_file((data_dir, 'data.dt'), '#')
|
||||
some_dir = join(self.tmp_dir, 'some')
|
||||
os.mkdir(some_dir)
|
||||
# make sure VCS directories are pruned (#14004)
|
||||
hg_dir = join(self.tmp_dir, '.hg')
|
||||
os.mkdir(hg_dir)
|
||||
self.write_file((hg_dir, 'last-message.txt'), '#')
|
||||
# a buggy regex used to prevent this from working on windows (#6884)
|
||||
self.write_file((self.tmp_dir, 'buildout.cfg'), '#')
|
||||
self.write_file((self.tmp_dir, 'inroot.txt'), '#')
|
||||
self.write_file((some_dir, 'file.txt'), '#')
|
||||
self.write_file((some_dir, 'other_file.txt'), '#')
|
||||
|
||||
dist.data_files = [
|
||||
('data', ['data/data.dt', 'buildout.cfg', 'inroot.txt', 'notexisting']),
|
||||
'some/file.txt',
|
||||
'some/other_file.txt',
|
||||
]
|
||||
|
||||
# adding a script
|
||||
script_dir = join(self.tmp_dir, 'scripts')
|
||||
os.mkdir(script_dir)
|
||||
self.write_file((script_dir, 'script.py'), '#')
|
||||
dist.scripts = [join('scripts', 'script.py')]
|
||||
|
||||
cmd.formats = ['zip']
|
||||
cmd.use_defaults = True
|
||||
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# now let's check what we have
|
||||
dist_folder = join(self.tmp_dir, 'dist')
|
||||
files = os.listdir(dist_folder)
|
||||
assert files == ['ns_fake_pkg-1.0.zip']
|
||||
|
||||
zip_file = zipfile.ZipFile(join(dist_folder, 'ns_fake_pkg-1.0.zip'))
|
||||
try:
|
||||
content = zip_file.namelist()
|
||||
finally:
|
||||
zip_file.close()
|
||||
|
||||
# making sure everything was added
|
||||
expected = [
|
||||
'',
|
||||
'PKG-INFO',
|
||||
'README',
|
||||
'buildout.cfg',
|
||||
'data/',
|
||||
'data/data.dt',
|
||||
'inroot.txt',
|
||||
'scripts/',
|
||||
'scripts/script.py',
|
||||
'setup.py',
|
||||
'some/',
|
||||
'some/file.txt',
|
||||
'some/other_file.txt',
|
||||
'somecode/',
|
||||
'somecode/__init__.py',
|
||||
'somecode/doc.dat',
|
||||
'somecode/doc.txt',
|
||||
]
|
||||
assert sorted(content) == ['ns_fake_pkg-1.0/' + x for x in expected]
|
||||
|
||||
# checking the MANIFEST
|
||||
manifest = pathlib.Path(self.tmp_dir, 'MANIFEST').read_text(encoding='utf-8')
|
||||
assert manifest == MANIFEST % {'sep': os.sep}
|
||||
|
||||
@staticmethod
|
||||
def warnings(messages, prefix='warning: '):
|
||||
return [msg for msg in messages if msg.startswith(prefix)]
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_metadata_check_option(self, caplog):
|
||||
# testing the `metadata-check` option
|
||||
dist, cmd = self.get_cmd(metadata={})
|
||||
|
||||
# this should raise some warnings !
|
||||
# with the `check` subcommand
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
assert len(self.warnings(caplog.messages, 'warning: check: ')) == 1
|
||||
|
||||
# trying with a complete set of metadata
|
||||
caplog.clear()
|
||||
dist, cmd = self.get_cmd()
|
||||
cmd.ensure_finalized()
|
||||
cmd.metadata_check = 0
|
||||
cmd.run()
|
||||
assert len(self.warnings(caplog.messages, 'warning: check: ')) == 0
|
||||
|
||||
def test_show_formats(self, capsys):
|
||||
show_formats()
|
||||
|
||||
# the output should be a header line + one line per format
|
||||
num_formats = len(ARCHIVE_FORMATS.keys())
|
||||
output = [
|
||||
line
|
||||
for line in capsys.readouterr().out.split('\n')
|
||||
if line.strip().startswith('--formats=')
|
||||
]
|
||||
assert len(output) == num_formats
|
||||
|
||||
def test_finalize_options(self):
|
||||
dist, cmd = self.get_cmd()
|
||||
cmd.finalize_options()
|
||||
|
||||
# default options set by finalize
|
||||
assert cmd.manifest == 'MANIFEST'
|
||||
assert cmd.template == 'MANIFEST.in'
|
||||
assert cmd.dist_dir == 'dist'
|
||||
|
||||
# formats has to be a string splittable on (' ', ',') or
# a list of strings
|
||||
cmd.formats = 1
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.finalize_options()
|
||||
cmd.formats = ['zip']
|
||||
cmd.finalize_options()
|
||||
|
||||
# formats has to be known
|
||||
cmd.formats = 'supazipa'
|
||||
with pytest.raises(DistutilsOptionError):
|
||||
cmd.finalize_options()
|
||||
|
||||
# the following tests make sure there is a nice error message instead
|
||||
# of a traceback when parsing an invalid manifest template
|
||||
|
||||
def _check_template(self, content, caplog):
|
||||
dist, cmd = self.get_cmd()
|
||||
os.chdir(self.tmp_dir)
|
||||
self.write_file('MANIFEST.in', content)
|
||||
cmd.ensure_finalized()
|
||||
cmd.filelist = FileList()
|
||||
cmd.read_template()
|
||||
assert len(self.warnings(caplog.messages)) == 1
|
||||
|
||||
def test_invalid_template_unknown_command(self, caplog):
|
||||
self._check_template('taunt knights *', caplog)
|
||||
|
||||
def test_invalid_template_wrong_arguments(self, caplog):
|
||||
# this manifest command takes one argument
|
||||
self._check_template('prune', caplog)
|
||||
|
||||
@pytest.mark.skipif("platform.system() != 'Windows'")
|
||||
def test_invalid_template_wrong_path(self, caplog):
|
||||
# on Windows, trailing slashes are not allowed
|
||||
# this used to crash instead of raising a warning: #8286
|
||||
self._check_template('include examples/', caplog)
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_get_file_list(self):
|
||||
# make sure MANIFEST is recalculated
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# filling data_files by pointing files in package_data
|
||||
dist.package_data = {'somecode': ['*.txt']}
|
||||
self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#')
|
||||
cmd.formats = ['gztar']
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
assert ilen(clean_lines(cmd.manifest)) == 5
|
||||
|
||||
# adding a file
|
||||
self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
|
||||
|
||||
# make sure build_py is reinitialized, like a fresh run
|
||||
build_py = dist.get_command_obj('build_py')
|
||||
build_py.finalized = False
|
||||
build_py.ensure_finalized()
|
||||
|
||||
cmd.run()
|
||||
|
||||
manifest2 = list(clean_lines(cmd.manifest))
|
||||
|
||||
# do we have the new file in MANIFEST ?
|
||||
assert len(manifest2) == 6
|
||||
assert 'doc2.txt' in manifest2[-1]
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_manifest_marker(self):
|
||||
# check that autogenerated MANIFESTs have a marker
|
||||
dist, cmd = self.get_cmd()
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
assert (
|
||||
next(clean_lines(cmd.manifest))
|
||||
== '# file GENERATED by distutils, do NOT edit'
|
||||
)
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_manifest_comments(self):
|
||||
# make sure comments don't cause exceptions or wrong includes
|
||||
contents = dedent(
|
||||
"""\
|
||||
# bad.py
|
||||
#bad.py
|
||||
good.py
|
||||
"""
|
||||
)
|
||||
dist, cmd = self.get_cmd()
|
||||
cmd.ensure_finalized()
|
||||
self.write_file((self.tmp_dir, cmd.manifest), contents)
|
||||
self.write_file((self.tmp_dir, 'good.py'), '# pick me!')
|
||||
self.write_file((self.tmp_dir, 'bad.py'), "# don't pick me!")
|
||||
self.write_file((self.tmp_dir, '#bad.py'), "# don't pick me!")
|
||||
cmd.run()
|
||||
assert cmd.filelist.files == ['good.py']
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
def test_manual_manifest(self):
|
||||
# check that a MANIFEST without a marker is left alone
|
||||
dist, cmd = self.get_cmd()
|
||||
cmd.formats = ['gztar']
|
||||
cmd.ensure_finalized()
|
||||
self.write_file((self.tmp_dir, cmd.manifest), 'README.manual')
|
||||
self.write_file(
|
||||
(self.tmp_dir, 'README.manual'),
|
||||
'This project maintains its MANIFEST file itself.',
|
||||
)
|
||||
cmd.run()
|
||||
assert cmd.filelist.files == ['README.manual']
|
||||
|
||||
assert list(clean_lines(cmd.manifest)) == ['README.manual']
|
||||
|
||||
archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
|
||||
archive = tarfile.open(archive_name)
|
||||
try:
|
||||
filenames = [tarinfo.name for tarinfo in archive]
|
||||
finally:
|
||||
archive.close()
|
||||
assert sorted(filenames) == [
|
||||
'ns_fake_pkg-1.0',
|
||||
'ns_fake_pkg-1.0/PKG-INFO',
|
||||
'ns_fake_pkg-1.0/README.manual',
|
||||
]
|
||||
|
||||
@pytest.mark.usefixtures('needs_zlib')
|
||||
@require_unix_id
|
||||
@require_uid_0
|
||||
@pytest.mark.skipif("not shutil.which('tar')")
|
||||
@pytest.mark.skipif("not shutil.which('gzip')")
|
||||
def test_make_distribution_owner_group(self):
|
||||
# now building a sdist
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# creating a gztar and specifying the owner+group
|
||||
cmd.formats = ['gztar']
|
||||
cmd.owner = pwd.getpwuid(0)[0]
|
||||
cmd.group = grp.getgrgid(0)[0]
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# make sure the archive members have the requested ownership
|
||||
archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
|
||||
archive = tarfile.open(archive_name)
|
||||
try:
|
||||
for member in archive.getmembers():
|
||||
assert member.uid == 0
|
||||
assert member.gid == 0
|
||||
finally:
|
||||
archive.close()
|
||||
|
||||
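# Illustrative aside (not part of the original test): TarInfo also records
# symbolic names, so the same ownership check could be written against
# member.uname / member.gname, e.g.
#   assert all(m.uname == cmd.owner for m in archive.getmembers())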
# building a sdist again
|
||||
dist, cmd = self.get_cmd()
|
||||
|
||||
# creating a gztar
|
||||
cmd.formats = ['gztar']
|
||||
cmd.ensure_finalized()
|
||||
cmd.run()
|
||||
|
||||
# make sure the archive members keep the default ownership
|
||||
archive_name = join(self.tmp_dir, 'dist', 'ns_fake_pkg-1.0.tar.gz')
|
||||
archive = tarfile.open(archive_name)
|
||||
|
||||
# note that we are not testing the group ownership here
|
||||
# because it depends on the platform and the container
|
||||
# rights (see #7408)
|
||||
try:
|
||||
for member in archive.getmembers():
|
||||
assert member.uid == os.getuid()
|
||||
finally:
|
||||
archive.close()
|
||||
@@ -0,0 +1,131 @@
|
||||
"""Tests for distutils.spawn."""
|
||||
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import unittest.mock as mock
|
||||
from distutils.errors import DistutilsExecError
|
||||
from distutils.spawn import find_executable, spawn
|
||||
from distutils.tests import support
|
||||
|
||||
import path
|
||||
import pytest
|
||||
from test.support import unix_shell
|
||||
|
||||
from .compat import py38 as os_helper
|
||||
|
||||
|
||||
class TestSpawn(support.TempdirManager):
|
||||
@pytest.mark.skipif("os.name not in ('nt', 'posix')")
|
||||
def test_spawn(self):
|
||||
tmpdir = self.mkdtemp()
|
||||
|
||||
# creating something executable
|
||||
# through the shell that returns 1
|
||||
if sys.platform != 'win32':
|
||||
exe = os.path.join(tmpdir, 'foo.sh')
|
||||
self.write_file(exe, f'#!{unix_shell}\nexit 1')
|
||||
else:
|
||||
exe = os.path.join(tmpdir, 'foo.bat')
|
||||
self.write_file(exe, 'exit 1')
|
||||
|
||||
os.chmod(exe, 0o777)
|
||||
with pytest.raises(DistutilsExecError):
|
||||
spawn([exe])
|
||||
|
||||
# now something that works
|
||||
if sys.platform != 'win32':
|
||||
exe = os.path.join(tmpdir, 'foo.sh')
|
||||
self.write_file(exe, f'#!{unix_shell}\nexit 0')
|
||||
else:
|
||||
exe = os.path.join(tmpdir, 'foo.bat')
|
||||
self.write_file(exe, 'exit 0')
|
||||
|
||||
os.chmod(exe, 0o777)
|
||||
spawn([exe]) # should work without any error
|
||||
|
||||
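# Illustrative sketch (not part of the original suite): spawn() also accepts
# an argument list for an interpreter that is known to exist, which exercises
# the success path without writing a helper script first.
def test_spawn_python_noop(self):
    spawn([sys.executable, '-c', 'pass'])  # should not raise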
def test_find_executable(self, tmp_path):
|
||||
program_path = self._make_executable(tmp_path, '.exe')
|
||||
program = program_path.name
|
||||
program_noeext = program_path.with_suffix('').name
|
||||
filename = str(program_path)
|
||||
tmp_dir = path.Path(tmp_path)
|
||||
|
||||
# test path parameter
|
||||
rv = find_executable(program, path=tmp_dir)
|
||||
assert rv == filename
|
||||
|
||||
if sys.platform == 'win32':
|
||||
# test without ".exe" extension
|
||||
rv = find_executable(program_noeext, path=tmp_dir)
|
||||
assert rv == filename
|
||||
|
||||
# test find in the current directory
|
||||
with tmp_dir:
|
||||
rv = find_executable(program)
|
||||
assert rv == program
|
||||
|
||||
# test non-existent program
|
||||
dont_exist_program = "dontexist_" + program
|
||||
rv = find_executable(dont_exist_program, path=tmp_dir)
|
||||
assert rv is None
|
||||
|
||||
# PATH='': no match, except in the current directory
|
||||
with os_helper.EnvironmentVarGuard() as env:
|
||||
env['PATH'] = ''
|
||||
with mock.patch(
|
||||
'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
|
||||
), mock.patch('distutils.spawn.os.defpath', tmp_dir):
|
||||
rv = find_executable(program)
|
||||
assert rv is None
|
||||
|
||||
# look in current directory
|
||||
with tmp_dir:
|
||||
rv = find_executable(program)
|
||||
assert rv == program
|
||||
|
||||
# PATH=':': explicitly looks in the current directory
|
||||
with os_helper.EnvironmentVarGuard() as env:
|
||||
env['PATH'] = os.pathsep
|
||||
with mock.patch(
|
||||
'distutils.spawn.os.confstr', return_value='', create=True
|
||||
), mock.patch('distutils.spawn.os.defpath', ''):
|
||||
rv = find_executable(program)
|
||||
assert rv is None
|
||||
|
||||
# look in current directory
|
||||
with tmp_dir:
|
||||
rv = find_executable(program)
|
||||
assert rv == program
|
||||
|
||||
# missing PATH: test os.confstr("CS_PATH") and os.defpath
|
||||
with os_helper.EnvironmentVarGuard() as env:
|
||||
env.pop('PATH', None)
|
||||
|
||||
# without confstr
|
||||
with mock.patch(
|
||||
'distutils.spawn.os.confstr', side_effect=ValueError, create=True
|
||||
), mock.patch('distutils.spawn.os.defpath', tmp_dir):
|
||||
rv = find_executable(program)
|
||||
assert rv == filename
|
||||
|
||||
# with confstr
|
||||
with mock.patch(
|
||||
'distutils.spawn.os.confstr', return_value=tmp_dir, create=True
|
||||
), mock.patch('distutils.spawn.os.defpath', ''):
|
||||
rv = find_executable(program)
|
||||
assert rv == filename
|
||||
|
||||
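# Illustrative sketch (not part of the original suite): when find_executable
# is given the path of an existing file, it appears to return that path
# unchanged without consulting PATH.
def test_find_executable_existing_path(self, tmp_path):
    exe = self._make_executable(tmp_path, '.exe')
    assert find_executable(str(exe)) == str(exe)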
@staticmethod
|
||||
def _make_executable(tmp_path, ext):
|
||||
# Give the temporary program a suffix regardless of platform.
|
||||
# It's needed on Windows and not harmful on others.
|
||||
program = tmp_path.joinpath('program').with_suffix(ext)
|
||||
program.write_text("", encoding='utf-8')
|
||||
program.chmod(stat.S_IXUSR)
|
||||
return program
|
||||
|
||||
def test_spawn_missing_exe(self):
|
||||
with pytest.raises(DistutilsExecError) as ctx:
|
||||
spawn(['does-not-exist'])
|
||||
assert "command 'does-not-exist' failed" in str(ctx.value)
|
||||
@@ -0,0 +1,319 @@
|
||||
"""Tests for distutils.sysconfig."""
|
||||
|
||||
import contextlib
|
||||
import distutils
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils import sysconfig
|
||||
from distutils.ccompiler import new_compiler # noqa: F401
|
||||
from distutils.unixccompiler import UnixCCompiler
|
||||
|
||||
import jaraco.envs
import jaraco.path
|
||||
import path
|
||||
import pytest
|
||||
from jaraco.text import trim
|
||||
from test.support import swap_item
|
||||
|
||||
|
||||
def _gen_makefile(root, contents):
|
||||
jaraco.path.build({'Makefile': trim(contents)}, root)
|
||||
return root / 'Makefile'
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
class TestSysconfig:
|
||||
def test_get_config_h_filename(self):
|
||||
config_h = sysconfig.get_config_h_filename()
|
||||
assert os.path.isfile(config_h)
|
||||
|
||||
@pytest.mark.skipif("platform.system() == 'Windows'")
|
||||
@pytest.mark.skipif("sys.implementation.name != 'cpython'")
|
||||
def test_get_makefile_filename(self):
|
||||
makefile = sysconfig.get_makefile_filename()
|
||||
assert os.path.isfile(makefile)
|
||||
|
||||
def test_get_python_lib(self, tmp_path):
|
||||
assert sysconfig.get_python_lib() != sysconfig.get_python_lib(prefix=tmp_path)
|
||||
|
||||
def test_get_config_vars(self):
|
||||
cvars = sysconfig.get_config_vars()
|
||||
assert isinstance(cvars, dict)
|
||||
assert cvars
|
||||
|
||||
@pytest.mark.skipif('sysconfig.IS_PYPY')
|
||||
@pytest.mark.skipif('sysconfig.python_build')
|
||||
@pytest.mark.xfail('platform.system() == "Windows"')
|
||||
def test_srcdir_simple(self):
|
||||
# See #15364.
|
||||
srcdir = pathlib.Path(sysconfig.get_config_var('srcdir'))
|
||||
|
||||
assert srcdir.absolute()
|
||||
assert srcdir.is_dir()
|
||||
|
||||
makefile = pathlib.Path(sysconfig.get_makefile_filename())
|
||||
assert makefile.parent.samefile(srcdir)
|
||||
|
||||
@pytest.mark.skipif('sysconfig.IS_PYPY')
|
||||
@pytest.mark.skipif('not sysconfig.python_build')
|
||||
def test_srcdir_python_build(self):
|
||||
# See #15364.
|
||||
srcdir = pathlib.Path(sysconfig.get_config_var('srcdir'))
|
||||
|
||||
# The python executable has not been installed so srcdir
|
||||
# should be a full source checkout.
|
||||
Python_h = srcdir.joinpath('Include', 'Python.h')
|
||||
assert Python_h.is_file()
|
||||
assert sysconfig._is_python_source_dir(srcdir)
|
||||
assert sysconfig._is_python_source_dir(str(srcdir))
|
||||
|
||||
def test_srcdir_independent_of_cwd(self):
|
||||
"""
|
||||
srcdir should be independent of the current working directory
|
||||
"""
|
||||
# See #15364.
|
||||
srcdir = sysconfig.get_config_var('srcdir')
|
||||
with path.Path('..'):
|
||||
srcdir2 = sysconfig.get_config_var('srcdir')
|
||||
assert srcdir == srcdir2
|
||||
|
||||
def customize_compiler(self):
|
||||
# make sure AR gets caught
|
||||
class compiler:
|
||||
compiler_type = 'unix'
|
||||
executables = UnixCCompiler.executables
|
||||
|
||||
def __init__(self):
|
||||
self.exes = {}
|
||||
|
||||
def set_executables(self, **kw):
|
||||
for k, v in kw.items():
|
||||
self.exes[k] = v
|
||||
|
||||
sysconfig_vars = {
|
||||
'AR': 'sc_ar',
|
||||
'CC': 'sc_cc',
|
||||
'CXX': 'sc_cxx',
|
||||
'ARFLAGS': '--sc-arflags',
|
||||
'CFLAGS': '--sc-cflags',
|
||||
'CCSHARED': '--sc-ccshared',
|
||||
'LDSHARED': 'sc_ldshared',
|
||||
'SHLIB_SUFFIX': 'sc_shutil_suffix',
|
||||
}
|
||||
|
||||
comp = compiler()
|
||||
with contextlib.ExitStack() as cm:
|
||||
for key, value in sysconfig_vars.items():
|
||||
cm.enter_context(swap_item(sysconfig._config_vars, key, value))
|
||||
sysconfig.customize_compiler(comp)
|
||||
|
||||
return comp
|
||||
|
||||
@pytest.mark.skipif("not isinstance(new_compiler(), UnixCCompiler)")
|
||||
@pytest.mark.usefixtures('disable_macos_customization')
|
||||
def test_customize_compiler(self):
|
||||
# Make sure that sysconfig._config_vars is initialized
|
||||
sysconfig.get_config_vars()
|
||||
|
||||
os.environ['AR'] = 'env_ar'
|
||||
os.environ['CC'] = 'env_cc'
|
||||
os.environ['CPP'] = 'env_cpp'
|
||||
os.environ['CXX'] = 'env_cxx --env-cxx-flags'
|
||||
os.environ['LDSHARED'] = 'env_ldshared'
|
||||
os.environ['LDFLAGS'] = '--env-ldflags'
|
||||
os.environ['ARFLAGS'] = '--env-arflags'
|
||||
os.environ['CFLAGS'] = '--env-cflags'
|
||||
os.environ['CPPFLAGS'] = '--env-cppflags'
|
||||
os.environ['RANLIB'] = 'env_ranlib'
|
||||
|
||||
comp = self.customize_compiler()
|
||||
assert comp.exes['archiver'] == 'env_ar --env-arflags'
|
||||
assert comp.exes['preprocessor'] == 'env_cpp --env-cppflags'
|
||||
assert comp.exes['compiler'] == 'env_cc --sc-cflags --env-cflags --env-cppflags'
|
||||
assert comp.exes['compiler_so'] == (
|
||||
'env_cc --sc-cflags --env-cflags --env-cppflags --sc-ccshared'
|
||||
)
|
||||
assert (
|
||||
comp.exes['compiler_cxx']
|
||||
== 'env_cxx --env-cxx-flags --sc-cflags --env-cppflags'
|
||||
)
|
||||
assert comp.exes['linker_exe'] == 'env_cc'
|
||||
assert comp.exes['linker_so'] == (
|
||||
'env_ldshared --env-ldflags --env-cflags --env-cppflags'
|
||||
)
|
||||
assert comp.shared_lib_extension == 'sc_shutil_suffix'
|
||||
|
||||
if sys.platform == "darwin":
|
||||
assert comp.exes['ranlib'] == 'env_ranlib'
|
||||
else:
|
||||
assert 'ranlib' not in comp.exes
|
||||
|
||||
del os.environ['AR']
|
||||
del os.environ['CC']
|
||||
del os.environ['CPP']
|
||||
del os.environ['CXX']
|
||||
del os.environ['LDSHARED']
|
||||
del os.environ['LDFLAGS']
|
||||
del os.environ['ARFLAGS']
|
||||
del os.environ['CFLAGS']
|
||||
del os.environ['CPPFLAGS']
|
||||
del os.environ['RANLIB']
|
||||
|
||||
comp = self.customize_compiler()
|
||||
assert comp.exes['archiver'] == 'sc_ar --sc-arflags'
|
||||
assert comp.exes['preprocessor'] == 'sc_cc -E'
|
||||
assert comp.exes['compiler'] == 'sc_cc --sc-cflags'
|
||||
assert comp.exes['compiler_so'] == 'sc_cc --sc-cflags --sc-ccshared'
|
||||
assert comp.exes['compiler_cxx'] == 'sc_cxx --sc-cflags'
|
||||
assert comp.exes['linker_exe'] == 'sc_cc'
|
||||
assert comp.exes['linker_so'] == 'sc_ldshared'
|
||||
assert comp.shared_lib_extension == 'sc_shutil_suffix'
|
||||
assert 'ranlib' not in comp.exes
|
||||
|
||||
def test_parse_makefile_base(self, tmp_path):
|
||||
makefile = _gen_makefile(
|
||||
tmp_path,
|
||||
"""
|
||||
CONFIG_ARGS= '--arg1=optarg1' 'ENV=LIB'
|
||||
VAR=$OTHER
|
||||
OTHER=foo
|
||||
""",
|
||||
)
|
||||
d = sysconfig.parse_makefile(makefile)
|
||||
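# VAR does not survive: parse_makefile only expands $(NAME) / ${NAME}
# references, so a value with any other use of "$" is treated as
# unresolvable and dropped.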
assert d == {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", 'OTHER': 'foo'}
|
||||
|
||||
def test_parse_makefile_literal_dollar(self, tmp_path):
|
||||
makefile = _gen_makefile(
|
||||
tmp_path,
|
||||
"""
|
||||
CONFIG_ARGS= '--arg1=optarg1' 'ENV=\\$$LIB'
|
||||
VAR=$OTHER
|
||||
OTHER=foo
|
||||
""",
|
||||
)
|
||||
d = sysconfig.parse_makefile(makefile)
|
||||
assert d == {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", 'OTHER': 'foo'}
|
||||
|
||||
def test_sysconfig_module(self):
|
||||
import sysconfig as global_sysconfig
|
||||
|
||||
assert global_sysconfig.get_config_var('CFLAGS') == sysconfig.get_config_var(
|
||||
'CFLAGS'
|
||||
)
|
||||
assert global_sysconfig.get_config_var('LDFLAGS') == sysconfig.get_config_var(
|
||||
'LDFLAGS'
|
||||
)
|
||||
|
||||
# On macOS, binary installers support extension module building on
|
||||
# various levels of the operating system with differing Xcode
|
||||
# configurations, requiring customization of some of the
|
||||
# compiler configuration directives to suit the environment on
|
||||
# the installed machine. Some of these customizations may require
|
||||
# running external programs and are thus deferred until needed by
|
||||
# the first extension module build. Only
|
||||
# the Distutils version of sysconfig is used for extension module
|
||||
# builds, which happens earlier in the Distutils tests. This may
|
||||
# cause the following tests to fail since no tests have caused
|
||||
# the global version of sysconfig to call the customization yet.
|
||||
# The solution for now is to simply skip this test in this case.
|
||||
# The longer-term solution is to only have one version of sysconfig.
|
||||
@pytest.mark.skipif("sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER')")
|
||||
def test_sysconfig_compiler_vars(self):
|
||||
import sysconfig as global_sysconfig
|
||||
|
||||
if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
|
||||
pytest.skip('compiler flags customized')
|
||||
assert global_sysconfig.get_config_var('LDSHARED') == sysconfig.get_config_var(
|
||||
'LDSHARED'
|
||||
)
|
||||
assert global_sysconfig.get_config_var('CC') == sysconfig.get_config_var('CC')
|
||||
|
||||
@pytest.mark.skipif("not sysconfig.get_config_var('EXT_SUFFIX')")
|
||||
def test_SO_deprecation(self):
|
||||
with pytest.warns(DeprecationWarning):
|
||||
sysconfig.get_config_var('SO')
|
||||
|
||||
def test_customize_compiler_before_get_config_vars(self, tmp_path):
|
||||
# Issue #21923: test that a Distribution compiler
|
||||
# instance can be called without an explicit call to
|
||||
# get_config_vars().
|
||||
jaraco.path.build(
|
||||
{
|
||||
'file': trim("""
|
||||
from distutils.core import Distribution
|
||||
config = Distribution().get_command_obj('config')
|
||||
# try_compile may pass or it may fail if no compiler
|
||||
# is found but it should not raise an exception.
|
||||
rc = config.try_compile('int x;')
|
||||
""")
|
||||
},
|
||||
tmp_path,
|
||||
)
|
||||
p = subprocess.Popen(
|
||||
[sys.executable, tmp_path / 'file'],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
universal_newlines=True,
|
||||
encoding='utf-8',
|
||||
)
|
||||
outs, errs = p.communicate()
|
||||
assert 0 == p.returncode, "Subprocess failed: " + outs
|
||||
|
||||
def test_parse_config_h(self):
|
||||
config_h = sysconfig.get_config_h_filename()
|
||||
input = {}
|
||||
with open(config_h, encoding="utf-8") as f:
|
||||
result = sysconfig.parse_config_h(f, g=input)
|
||||
assert input is result
|
||||
with open(config_h, encoding="utf-8") as f:
|
||||
result = sysconfig.parse_config_h(f)
|
||||
assert isinstance(result, dict)
|
||||
|
||||
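# Illustrative sketch (not part of the original suite): parse_config_h
# accepts any file-like object, so a literal "#define NAME value" block
# shows the shape of the returned mapping (integer values are converted).
def test_parse_config_h_literal(self):
    import io

    fragment = io.StringIO('#define HAVE_FOO 1\n#define SIZEOF_INT 4\n')
    assert sysconfig.parse_config_h(fragment) == {'HAVE_FOO': 1, 'SIZEOF_INT': 4}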
@pytest.mark.skipif("platform.system() != 'Windows'")
|
||||
@pytest.mark.skipif("sys.implementation.name != 'cpython'")
|
||||
def test_win_ext_suffix(self):
|
||||
assert sysconfig.get_config_var("EXT_SUFFIX").endswith(".pyd")
|
||||
assert sysconfig.get_config_var("EXT_SUFFIX") != ".pyd"
|
||||
|
||||
@pytest.mark.skipif("platform.system() != 'Windows'")
|
||||
@pytest.mark.skipif("sys.implementation.name != 'cpython'")
|
||||
@pytest.mark.skipif(
|
||||
'\\PCbuild\\'.casefold() not in sys.executable.casefold(),
|
||||
reason='Need sys.executable to be in a source tree',
|
||||
)
|
||||
def test_win_build_venv_from_source_tree(self, tmp_path):
|
||||
"""Ensure distutils.sysconfig detects venvs from source tree builds."""
|
||||
env = jaraco.envs.VEnv()
|
||||
env.create_opts = env.clean_opts
|
||||
env.root = tmp_path
|
||||
env.ensure_env()
|
||||
cmd = [
|
||||
env.exe(),
|
||||
"-c",
|
||||
"import distutils.sysconfig; print(distutils.sysconfig.python_build)",
|
||||
]
|
||||
distutils_path = os.path.dirname(os.path.dirname(distutils.__file__))
|
||||
out = subprocess.check_output(
    cmd, env={**os.environ, "PYTHONPATH": distutils_path}, text=True
)
assert out.strip() == "True"
|
||||
|
||||
def test_get_python_inc_missing_config_dir(self, monkeypatch):
|
||||
"""
|
||||
In portable Python installations, the sysconfig will be broken,
|
||||
pointing to the directories where the installation was built and
|
||||
not where it currently is. In this case, ensure that the missing
|
||||
directory isn't used for get_python_inc.
|
||||
|
||||
See pypa/distutils#178.
|
||||
"""
|
||||
|
||||
def override(name):
|
||||
if name == 'INCLUDEPY':
|
||||
return '/does-not-exist'
|
||||
return sysconfig.get_config_var(name)
|
||||
|
||||
monkeypatch.setattr(sysconfig, 'get_config_var', override)
|
||||
|
||||
assert os.path.exists(sysconfig.get_python_inc())
|
||||
@@ -0,0 +1,127 @@
|
||||
"""Tests for distutils.text_file."""
|
||||
|
||||
from distutils.tests import support
|
||||
from distutils.text_file import TextFile
|
||||
|
||||
import jaraco.path
|
||||
import path
|
||||
|
||||
TEST_DATA = """# test file
|
||||
|
||||
line 3 \\
|
||||
# intervening comment
|
||||
continues on next line
|
||||
"""
|
||||
|
||||
|
||||
class TestTextFile(support.TempdirManager):
|
||||
def test_class(self):
|
||||
# old tests moved from text_file.__main__
|
||||
# so that they actually get run by the buildbots
|
||||
|
||||
# result 1: no fancy options
|
||||
result1 = [
|
||||
'# test file\n',
|
||||
'\n',
|
||||
'line 3 \\\n',
|
||||
'# intervening comment\n',
|
||||
' continues on next line\n',
|
||||
]
|
||||
|
||||
# result 2: just strip comments
|
||||
result2 = ["\n", "line 3 \\\n", " continues on next line\n"]
|
||||
|
||||
# result 3: just strip blank lines
|
||||
result3 = [
|
||||
"# test file\n",
|
||||
"line 3 \\\n",
|
||||
"# intervening comment\n",
|
||||
" continues on next line\n",
|
||||
]
|
||||
|
||||
# result 4: default, strip comments, blank lines,
|
||||
# and trailing whitespace
|
||||
result4 = ["line 3 \\", " continues on next line"]
|
||||
|
||||
# result 5: strip comments and blanks, plus join lines (but don't
|
||||
# "collapse" joined lines
|
||||
result5 = ["line 3 continues on next line"]
|
||||
|
||||
# result 6: strip comments and blanks, plus join lines (and
|
||||
# "collapse" joined lines
|
||||
result6 = ["line 3 continues on next line"]
|
||||
|
||||
def test_input(count, description, file, expected_result):
|
||||
result = file.readlines()
|
||||
assert result == expected_result
|
||||
|
||||
tmp_path = path.Path(self.mkdtemp())
|
||||
filename = tmp_path / 'test.txt'
|
||||
jaraco.path.build({filename.name: TEST_DATA}, tmp_path)
|
||||
|
||||
in_file = TextFile(
|
||||
filename,
|
||||
strip_comments=False,
|
||||
skip_blanks=False,
|
||||
lstrip_ws=False,
|
||||
rstrip_ws=False,
|
||||
)
|
||||
try:
|
||||
test_input(1, "no processing", in_file, result1)
|
||||
finally:
|
||||
in_file.close()
|
||||
|
||||
in_file = TextFile(
|
||||
filename,
|
||||
strip_comments=True,
|
||||
skip_blanks=False,
|
||||
lstrip_ws=False,
|
||||
rstrip_ws=False,
|
||||
)
|
||||
try:
|
||||
test_input(2, "strip comments", in_file, result2)
|
||||
finally:
|
||||
in_file.close()
|
||||
|
||||
in_file = TextFile(
|
||||
filename,
|
||||
strip_comments=False,
|
||||
skip_blanks=True,
|
||||
lstrip_ws=False,
|
||||
rstrip_ws=False,
|
||||
)
|
||||
try:
|
||||
test_input(3, "strip blanks", in_file, result3)
|
||||
finally:
|
||||
in_file.close()
|
||||
|
||||
in_file = TextFile(filename)
|
||||
try:
|
||||
test_input(4, "default processing", in_file, result4)
|
||||
finally:
|
||||
in_file.close()
|
||||
|
||||
in_file = TextFile(
|
||||
filename,
|
||||
strip_comments=True,
|
||||
skip_blanks=True,
|
||||
join_lines=True,
|
||||
rstrip_ws=True,
|
||||
)
|
||||
try:
|
||||
test_input(5, "join lines without collapsing", in_file, result5)
|
||||
finally:
|
||||
in_file.close()
|
||||
|
||||
in_file = TextFile(
|
||||
filename,
|
||||
strip_comments=True,
|
||||
skip_blanks=True,
|
||||
join_lines=True,
|
||||
rstrip_ws=True,
|
||||
collapse_join=True,
|
||||
)
|
||||
try:
|
||||
test_input(6, "join lines with collapsing", in_file, result6)
|
||||
finally:
|
||||
in_file.close()
|
||||
@@ -0,0 +1,351 @@
|
||||
"""Tests for distutils.unixccompiler."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import unittest.mock as mock
|
||||
from distutils import sysconfig
|
||||
from distutils.compat import consolidate_linker_args
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
from distutils.unixccompiler import UnixCCompiler
|
||||
from distutils.util import _clear_cached_macosx_ver
|
||||
|
||||
import pytest
|
||||
|
||||
from . import support
|
||||
from .compat.py38 import EnvironmentVarGuard
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def save_values(monkeypatch):
|
||||
monkeypatch.setattr(sys, 'platform', sys.platform)
|
||||
monkeypatch.setattr(sysconfig, 'get_config_var', sysconfig.get_config_var)
|
||||
monkeypatch.setattr(sysconfig, 'get_config_vars', sysconfig.get_config_vars)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def compiler_wrapper(request):
|
||||
class CompilerWrapper(UnixCCompiler):
|
||||
def rpath_foo(self):
|
||||
return self.runtime_library_dir_option('/foo')
|
||||
|
||||
request.instance.cc = CompilerWrapper()
|
||||
|
||||
|
||||
class TestUnixCCompiler(support.TempdirManager):
|
||||
@pytest.mark.skipif('platform.system() == "Windows"')
|
||||
def test_runtime_libdir_option(self): # noqa: C901
|
||||
# Issue #5900; GitHub Issue #37
|
||||
#
|
||||
# Ensure RUNPATH is added to extension modules with RPATH if
|
||||
# GNU ld is used
|
||||
|
||||
# darwin
|
||||
sys.platform = 'darwin'
|
||||
darwin_ver_var = 'MACOSX_DEPLOYMENT_TARGET'
|
||||
darwin_rpath_flag = '-Wl,-rpath,/foo'
|
||||
darwin_lib_flag = '-L/foo'
|
||||
|
||||
# (macOS version from syscfg, macOS version from env var) -> flag
|
||||
# Version value of None generates two tests: as None and as empty string
|
||||
# An expected flag value of None means a mismatch exception is expected
|
||||
darwin_test_cases = [
|
||||
((None, None), darwin_lib_flag),
|
||||
((None, '11'), darwin_rpath_flag),
|
||||
(('10', None), darwin_lib_flag),
|
||||
(('10.3', None), darwin_lib_flag),
|
||||
(('10.3.1', None), darwin_lib_flag),
|
||||
(('10.5', None), darwin_rpath_flag),
|
||||
(('10.5.1', None), darwin_rpath_flag),
|
||||
(('10.3', '10.3'), darwin_lib_flag),
|
||||
(('10.3', '10.5'), darwin_rpath_flag),
|
||||
(('10.5', '10.3'), darwin_lib_flag),
|
||||
(('10.5', '11'), darwin_rpath_flag),
|
||||
(('10.4', '10'), None),
|
||||
]
|
||||
|
||||
def make_darwin_gcv(syscfg_macosx_ver):
|
||||
def gcv(var):
|
||||
if var == darwin_ver_var:
|
||||
return syscfg_macosx_ver
|
||||
return "xxx"
|
||||
|
||||
return gcv
|
||||
|
||||
def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag):
|
||||
env = os.environ
|
||||
msg = f"macOS version = (sysconfig={syscfg_macosx_ver!r}, env={env_macosx_ver!r})"
|
||||
|
||||
# Save
|
||||
old_gcv = sysconfig.get_config_var
|
||||
old_env_macosx_ver = env.get(darwin_ver_var)
|
||||
|
||||
# Setup environment
|
||||
_clear_cached_macosx_ver()
|
||||
sysconfig.get_config_var = make_darwin_gcv(syscfg_macosx_ver)
|
||||
if env_macosx_ver is not None:
|
||||
env[darwin_ver_var] = env_macosx_ver
|
||||
elif darwin_ver_var in env:
|
||||
env.pop(darwin_ver_var)
|
||||
|
||||
# Run the test
|
||||
if expected_flag is not None:
|
||||
assert self.cc.rpath_foo() == expected_flag, msg
|
||||
else:
|
||||
with pytest.raises(
|
||||
DistutilsPlatformError, match=darwin_ver_var + r' mismatch'
|
||||
):
|
||||
self.cc.rpath_foo()
|
||||
|
||||
# Restore
|
||||
if old_env_macosx_ver is not None:
|
||||
env[darwin_ver_var] = old_env_macosx_ver
|
||||
elif darwin_ver_var in env:
|
||||
env.pop(darwin_ver_var)
|
||||
sysconfig.get_config_var = old_gcv
|
||||
_clear_cached_macosx_ver()
|
||||
|
||||
for macosx_vers, expected_flag in darwin_test_cases:
|
||||
syscfg_macosx_ver, env_macosx_ver = macosx_vers
|
||||
do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag)
|
||||
# Bonus test cases with None interpreted as empty string
|
||||
if syscfg_macosx_ver is None:
|
||||
do_darwin_test("", env_macosx_ver, expected_flag)
|
||||
if env_macosx_ver is None:
|
||||
do_darwin_test(syscfg_macosx_ver, "", expected_flag)
|
||||
if syscfg_macosx_ver is None and env_macosx_ver is None:
|
||||
do_darwin_test("", "", expected_flag)
|
||||
|
||||
old_gcv = sysconfig.get_config_var
|
||||
|
||||
# hp-ux
|
||||
sys.platform = 'hp-ux'
|
||||
|
||||
def gcv(v):
|
||||
return 'xxx'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == ['+s', '-L/foo']
|
||||
|
||||
def gcv(v):
|
||||
return 'gcc'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo']
|
||||
|
||||
def gcv(v):
|
||||
return 'g++'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == ['-Wl,+s', '-L/foo']
|
||||
|
||||
sysconfig.get_config_var = old_gcv
|
||||
|
||||
# GCC GNULD
|
||||
sys.platform = 'bar'
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'gcc'
|
||||
elif v == 'GNULD':
|
||||
return 'yes'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == consolidate_linker_args([
|
||||
'-Wl,--enable-new-dtags',
|
||||
'-Wl,-rpath,/foo',
|
||||
])
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'gcc -pthread -B /bar'
|
||||
elif v == 'GNULD':
|
||||
return 'yes'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == consolidate_linker_args([
|
||||
'-Wl,--enable-new-dtags',
|
||||
'-Wl,-rpath,/foo',
|
||||
])
|
||||
|
||||
# GCC non-GNULD
|
||||
sys.platform = 'bar'
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'gcc'
|
||||
elif v == 'GNULD':
|
||||
return 'no'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == '-Wl,-R/foo'
|
||||
|
||||
# GCC GNULD with fully qualified configuration prefix
|
||||
# see #7617
|
||||
sys.platform = 'bar'
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'x86_64-pc-linux-gnu-gcc-4.4.2'
|
||||
elif v == 'GNULD':
|
||||
return 'yes'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == consolidate_linker_args([
|
||||
'-Wl,--enable-new-dtags',
|
||||
'-Wl,-rpath,/foo',
|
||||
])
|
||||
|
||||
# non-GCC GNULD
|
||||
sys.platform = 'bar'
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'cc'
|
||||
elif v == 'GNULD':
|
||||
return 'yes'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == consolidate_linker_args([
|
||||
'-Wl,--enable-new-dtags',
|
||||
'-Wl,-rpath,/foo',
|
||||
])
|
||||
|
||||
# non-GCC non-GNULD
|
||||
sys.platform = 'bar'
|
||||
|
||||
def gcv(v):
|
||||
if v == 'CC':
|
||||
return 'cc'
|
||||
elif v == 'GNULD':
|
||||
return 'no'
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
assert self.cc.rpath_foo() == '-Wl,-R/foo'
|
||||
|
||||
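# Summary (added for clarity, not part of the original suite) of the
# non-darwin cases exercised above:
#
#   CC           GNULD  runtime_library_dir_option('/foo')
#   any (hp-ux)  n/a    ['+s', '-L/foo'], or ['-Wl,+s', '-L/foo'] for gcc/g++
#   gcc variants yes    consolidated -Wl,--enable-new-dtags -Wl,-rpath,/foo
#   gcc          no     -Wl,-R/foo
#   cc           yes    consolidated -Wl,--enable-new-dtags -Wl,-rpath,/foo
#   cc           no     -Wl,-R/foo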
@pytest.mark.skipif('platform.system() == "Windows"')
|
||||
def test_cc_overrides_ldshared(self):
|
||||
# Issue #18080:
|
||||
# ensure that setting CC env variable also changes default linker
|
||||
def gcv(v):
|
||||
if v == 'LDSHARED':
|
||||
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
||||
return 'gcc-4.2'
|
||||
|
||||
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
||||
if args:
|
||||
return list(map(sysconfig.get_config_var, args))
|
||||
return _orig()
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
sysconfig.get_config_vars = gcvs
|
||||
with EnvironmentVarGuard() as env:
|
||||
env['CC'] = 'my_cc'
|
||||
del env['LDSHARED']
|
||||
sysconfig.customize_compiler(self.cc)
|
||||
assert self.cc.linker_so[0] == 'my_cc'
|
||||
|
||||
@pytest.mark.skipif('platform.system() == "Windows"')
|
||||
@pytest.mark.usefixtures('disable_macos_customization')
|
||||
def test_cc_overrides_ldshared_for_cxx_correctly(self):
|
||||
"""
|
||||
Ensure that setting CC env variable also changes default linker
|
||||
correctly when building C++ extensions.
|
||||
|
||||
pypa/distutils#126
|
||||
"""
|
||||
|
||||
def gcv(v):
|
||||
if v == 'LDSHARED':
|
||||
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
||||
elif v == 'LDCXXSHARED':
|
||||
return 'g++-4.2 -bundle -undefined dynamic_lookup '
|
||||
elif v == 'CXX':
|
||||
return 'g++-4.2'
|
||||
elif v == 'CC':
|
||||
return 'gcc-4.2'
|
||||
return ''
|
||||
|
||||
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
||||
if args:
|
||||
return list(map(sysconfig.get_config_var, args))
|
||||
return _orig()
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
sysconfig.get_config_vars = gcvs
|
||||
with mock.patch.object(
|
||||
self.cc, 'spawn', return_value=None
|
||||
) as mock_spawn, mock.patch.object(
|
||||
self.cc, '_need_link', return_value=True
|
||||
), mock.patch.object(
|
||||
self.cc, 'mkpath', return_value=None
|
||||
), EnvironmentVarGuard() as env:
|
||||
env['CC'] = 'ccache my_cc'
|
||||
env['CXX'] = 'my_cxx'
|
||||
del env['LDSHARED']
|
||||
sysconfig.customize_compiler(self.cc)
|
||||
assert self.cc.linker_so[0:2] == ['ccache', 'my_cc']
|
||||
self.cc.link(None, [], 'a.out', target_lang='c++')
|
||||
call_args = mock_spawn.call_args[0][0]
|
||||
expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup']
|
||||
assert call_args[:4] == expected
|
||||
|
||||
@pytest.mark.skipif('platform.system() == "Windows"')
|
||||
def test_explicit_ldshared(self):
|
||||
# Issue #18080:
|
||||
# ensure that setting CC env variable does not change
|
||||
# explicit LDSHARED setting for linker
|
||||
def gcv(v):
|
||||
if v == 'LDSHARED':
|
||||
return 'gcc-4.2 -bundle -undefined dynamic_lookup '
|
||||
return 'gcc-4.2'
|
||||
|
||||
def gcvs(*args, _orig=sysconfig.get_config_vars):
|
||||
if args:
|
||||
return list(map(sysconfig.get_config_var, args))
|
||||
return _orig()
|
||||
|
||||
sysconfig.get_config_var = gcv
|
||||
sysconfig.get_config_vars = gcvs
|
||||
with EnvironmentVarGuard() as env:
|
||||
env['CC'] = 'my_cc'
|
||||
env['LDSHARED'] = 'my_ld -bundle -dynamic'
|
||||
sysconfig.customize_compiler(self.cc)
|
||||
assert self.cc.linker_so[0] == 'my_ld'
|
||||
|
||||
def test_has_function(self):
|
||||
# Issue https://github.com/pypa/distutils/issues/64:
|
||||
# ensure that setting output_dir does not raise
|
||||
# FileNotFoundError: [Errno 2] No such file or directory: 'a.out'
|
||||
self.cc.output_dir = 'scratch'
|
||||
os.chdir(self.mkdtemp())
|
||||
self.cc.has_function('abort')
|
||||
|
||||
def test_find_library_file(self, monkeypatch):
|
||||
compiler = UnixCCompiler()
|
||||
compiler._library_root = lambda dir: dir
|
||||
monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d)
|
||||
|
||||
libname = 'libabc.dylib' if sys.platform != 'cygwin' else 'cygabc.dll'
|
||||
dirs = ('/foo/bar/missing', '/foo/bar/existing')
|
||||
assert (
|
||||
compiler.find_library_file(dirs, 'abc').replace('\\', '/')
|
||||
== f'/foo/bar/existing/{libname}'
|
||||
)
|
||||
assert (
|
||||
compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
|
||||
== f'/foo/bar/existing/{libname}'
|
||||
)
|
||||
|
||||
monkeypatch.setattr(
|
||||
os.path,
|
||||
'exists',
|
||||
lambda d: 'existing' in d and '.a' in d and '.dll.a' not in d,
|
||||
)
|
||||
assert (
|
||||
compiler.find_library_file(dirs, 'abc').replace('\\', '/')
|
||||
== '/foo/bar/existing/libabc.a'
|
||||
)
|
||||
assert (
|
||||
compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/')
|
||||
== '/foo/bar/existing/libabc.a'
|
||||
)
|
||||
@@ -0,0 +1,243 @@
|
||||
"""Tests for distutils.util."""
|
||||
|
||||
import email
|
||||
import email.generator
|
||||
import email.policy
|
||||
import io
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import sysconfig as stdlib_sysconfig
|
||||
import unittest.mock as mock
|
||||
from copy import copy
|
||||
from distutils import sysconfig, util
|
||||
from distutils.errors import DistutilsByteCompileError, DistutilsPlatformError
|
||||
from distutils.util import (
|
||||
byte_compile,
|
||||
change_root,
|
||||
check_environ,
|
||||
convert_path,
|
||||
get_host_platform,
|
||||
get_platform,
|
||||
grok_environment_error,
|
||||
rfc822_escape,
|
||||
split_quoted,
|
||||
strtobool,
|
||||
)
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def environment(monkeypatch):
|
||||
monkeypatch.setattr(os, 'name', os.name)
|
||||
monkeypatch.setattr(sys, 'platform', sys.platform)
|
||||
monkeypatch.setattr(sys, 'version', sys.version)
|
||||
monkeypatch.setattr(os, 'sep', os.sep)
|
||||
monkeypatch.setattr(os.path, 'join', os.path.join)
|
||||
monkeypatch.setattr(os.path, 'isabs', os.path.isabs)
|
||||
monkeypatch.setattr(os.path, 'splitdrive', os.path.splitdrive)
|
||||
monkeypatch.setattr(sysconfig, '_config_vars', copy(sysconfig._config_vars))
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('save_env')
|
||||
class TestUtil:
|
||||
def test_get_host_platform(self):
|
||||
with mock.patch('os.name', 'nt'):
|
||||
with mock.patch('sys.version', '... [... (ARM64)]'):
|
||||
assert get_host_platform() == 'win-arm64'
|
||||
with mock.patch('sys.version', '... [... (ARM)]'):
|
||||
assert get_host_platform() == 'win-arm32'
|
||||
|
||||
with mock.patch('sys.version_info', (3, 9, 0, 'final', 0)):
|
||||
assert get_host_platform() == stdlib_sysconfig.get_platform()
|
||||
|
||||
def test_get_platform(self):
|
||||
with mock.patch('os.name', 'nt'):
|
||||
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}):
|
||||
assert get_platform() == 'win32'
|
||||
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}):
|
||||
assert get_platform() == 'win-amd64'
|
||||
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}):
|
||||
assert get_platform() == 'win-arm32'
|
||||
with mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}):
|
||||
assert get_platform() == 'win-arm64'
|
||||
|
||||
def test_convert_path(self):
|
||||
expected = os.sep.join(('', 'home', 'to', 'my', 'stuff'))
|
||||
assert convert_path('/home/to/my/stuff') == expected
|
||||
assert convert_path(pathlib.Path('/home/to/my/stuff')) == expected
|
||||
assert convert_path('.') == os.curdir
|
||||
|
||||
def test_change_root(self):
|
||||
# linux/mac
|
||||
os.name = 'posix'
|
||||
|
||||
def _isabs(path):
|
||||
return path[0] == '/'
|
||||
|
||||
os.path.isabs = _isabs
|
||||
|
||||
def _join(*path):
|
||||
return '/'.join(path)
|
||||
|
||||
os.path.join = _join
|
||||
|
||||
assert change_root('/root', '/old/its/here') == '/root/old/its/here'
|
||||
assert change_root('/root', 'its/here') == '/root/its/here'
|
||||
|
||||
# windows
|
||||
os.name = 'nt'
|
||||
os.sep = '\\'
|
||||
|
||||
def _isabs(path):
|
||||
return path.startswith('c:\\')
|
||||
|
||||
os.path.isabs = _isabs
|
||||
|
||||
def _splitdrive(path):
|
||||
if path.startswith('c:'):
|
||||
return ('', path.replace('c:', ''))
|
||||
return ('', path)
|
||||
|
||||
os.path.splitdrive = _splitdrive
|
||||
|
||||
def _join(*path):
|
||||
return '\\'.join(path)
|
||||
|
||||
os.path.join = _join
|
||||
|
||||
assert (
|
||||
change_root('c:\\root', 'c:\\old\\its\\here') == 'c:\\root\\old\\its\\here'
|
||||
)
|
||||
assert change_root('c:\\root', 'its\\here') == 'c:\\root\\its\\here'
|
||||
|
||||
# BugsBunny os (it's a great os)
|
||||
os.name = 'BugsBunny'
|
||||
with pytest.raises(DistutilsPlatformError):
|
||||
change_root('c:\\root', 'its\\here')
|
||||
|
||||
# XXX platforms to be covered: mac
|
||||
|
||||
def test_check_environ(self):
|
||||
util.check_environ.cache_clear()
|
||||
os.environ.pop('HOME', None)
|
||||
|
||||
check_environ()
|
||||
|
||||
assert os.environ['PLAT'] == get_platform()
|
||||
|
||||
@pytest.mark.skipif("os.name != 'posix'")
|
||||
def test_check_environ_getpwuid(self):
|
||||
util.check_environ.cache_clear()
|
||||
os.environ.pop('HOME', None)
|
||||
|
||||
import pwd
|
||||
|
||||
# only set pw_dir field, other fields are not used
|
||||
result = pwd.struct_passwd((
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
'/home/distutils',
|
||||
None,
|
||||
))
|
||||
with mock.patch.object(pwd, 'getpwuid', return_value=result):
|
||||
check_environ()
|
||||
assert os.environ['HOME'] == '/home/distutils'
|
||||
|
||||
util.check_environ.cache_clear()
|
||||
os.environ.pop('HOME', None)
|
||||
|
||||
# bpo-10496: Catch pwd.getpwuid() error
|
||||
with mock.patch.object(pwd, 'getpwuid', side_effect=KeyError):
|
||||
check_environ()
|
||||
assert 'HOME' not in os.environ
|
||||
|
||||
def test_split_quoted(self):
|
||||
assert split_quoted('""one"" "two" \'three\' \\four') == [
|
||||
'one',
|
||||
'two',
|
||||
'three',
|
||||
'four',
|
||||
]
|
||||
|
||||
def test_strtobool(self):
|
||||
yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
|
||||
no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
|
||||
|
||||
for y in yes:
|
||||
assert strtobool(y)
|
||||
|
||||
for n in no:
|
||||
assert not strtobool(n)
|
||||
|
||||
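# Illustrative sketch (not part of the original suite): anything outside
# the accepted spellings above is rejected with ValueError.
def test_strtobool_invalid(self):
    with pytest.raises(ValueError):
        strtobool('maybe')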
indent = 8 * ' '
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"given,wanted",
|
||||
[
|
||||
# 0x0b, 0x0c, etc. are also considered line breaks by Python
|
||||
("hello\x0b\nworld\n", f"hello\x0b{indent}\n{indent}world\n{indent}"),
|
||||
("hello\x1eworld", f"hello\x1e{indent}world"),
|
||||
("", ""),
|
||||
(
|
||||
"I am a\npoor\nlonesome\nheader\n",
|
||||
f"I am a\n{indent}poor\n{indent}lonesome\n{indent}header\n{indent}",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_rfc822_escape(self, given, wanted):
|
||||
"""
|
||||
We want to ensure a multi-line header parses correctly.
|
||||
|
||||
For interoperability, the escaped value should also "round-trip" over
|
||||
`email.generator.Generator.flatten` and `email.message_from_*`
|
||||
(see pypa/setuptools#4033).
|
||||
|
||||
The main issue is that internally `email.policy.EmailPolicy` uses
|
||||
`splitlines` which will split on some control chars. If all the new lines
|
||||
are not prefixed with spaces, the parser will interrupt reading
|
||||
the current header and produce an incomplete value, while
|
||||
incorrectly interpreting the rest of the headers as part of the payload.
|
||||
"""
|
||||
res = rfc822_escape(given)
|
||||
|
||||
policy = email.policy.EmailPolicy(
|
||||
utf8=True,
|
||||
mangle_from_=False,
|
||||
max_line_length=0,
|
||||
)
|
||||
with io.StringIO() as buffer:
|
||||
raw = f"header: {res}\nother-header: 42\n\npayload\n"
|
||||
orig = email.message_from_string(raw)
|
||||
email.generator.Generator(buffer, policy=policy).flatten(orig)
|
||||
buffer.seek(0)
|
||||
regen = email.message_from_file(buffer)
|
||||
|
||||
for msg in (orig, regen):
|
||||
assert msg.get_payload() == "payload\n"
|
||||
assert msg["other-header"] == "42"
|
||||
# Generator may replace control chars with `\n`
|
||||
assert set(msg["header"].splitlines()) == set(res.splitlines())
|
||||
|
||||
assert res == wanted
|
||||
|
||||
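# Illustrative sketch (not part of the original suite): the indentation
# contract checked above can be seen on a minimal input, assuming the same
# 8-space continuation indent used by the parametrized cases.
def test_rfc822_escape_minimal(self):
    assert rfc822_escape("a\nb") == "a\n" + 8 * " " + "b"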
def test_dont_write_bytecode(self):
|
||||
# make sure byte_compile raises a DistutilsByteCompileError
|
||||
# if sys.dont_write_bytecode is True
|
||||
old_dont_write_bytecode = sys.dont_write_bytecode
|
||||
sys.dont_write_bytecode = True
|
||||
try:
|
||||
with pytest.raises(DistutilsByteCompileError):
|
||||
byte_compile([])
|
||||
finally:
|
||||
sys.dont_write_bytecode = old_dont_write_bytecode
|
||||
|
||||
def test_grok_environment_error(self):
|
||||
# test obsolete function to ensure backward compat (#4931)
|
||||
exc = OSError("Unable to find batch file")
|
||||
msg = grok_environment_error(exc)
|
||||
assert msg == "error: Unable to find batch file"
|
||||
@@ -0,0 +1,80 @@
|
||||
"""Tests for distutils.version."""
|
||||
|
||||
import distutils
|
||||
from distutils.version import LooseVersion, StrictVersion
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def suppress_deprecation():
|
||||
with distutils.version.suppress_known_deprecation():
|
||||
yield
|
||||
|
||||
|
||||
class TestVersion:
|
||||
def test_prerelease(self):
|
||||
version = StrictVersion('1.2.3a1')
|
||||
assert version.version == (1, 2, 3)
|
||||
assert version.prerelease == ('a', 1)
|
||||
assert str(version) == '1.2.3a1'
|
||||
|
||||
version = StrictVersion('1.2.0')
|
||||
assert str(version) == '1.2'
|
||||
|
||||
def test_cmp_strict(self):
|
||||
versions = (
|
||||
('1.5.1', '1.5.2b2', -1),
|
||||
('161', '3.10a', ValueError),
|
||||
('8.02', '8.02', 0),
|
||||
('3.4j', '1996.07.12', ValueError),
|
||||
('3.2.pl0', '3.1.1.6', ValueError),
|
||||
('2g6', '11g', ValueError),
|
||||
('0.9', '2.2', -1),
|
||||
('1.2.1', '1.2', 1),
|
||||
('1.1', '1.2.2', -1),
|
||||
('1.2', '1.1', 1),
|
||||
('1.2.1', '1.2.2', -1),
|
||||
('1.2.2', '1.2', 1),
|
||||
('1.2', '1.2.2', -1),
|
||||
('0.4.0', '0.4', 0),
|
||||
('1.13++', '5.5.kw', ValueError),
|
||||
)
|
||||
|
||||
for v1, v2, wanted in versions:
|
||||
try:
|
||||
res = StrictVersion(v1)._cmp(StrictVersion(v2))
|
||||
except ValueError:
|
||||
if wanted is ValueError:
|
||||
continue
|
||||
else:
|
||||
raise AssertionError(f"cmp({v1}, {v2}) shouldn't raise ValueError")
|
||||
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
||||
res = StrictVersion(v1)._cmp(v2)
|
||||
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
||||
res = StrictVersion(v1)._cmp(object())
|
||||
assert (
|
||||
res is NotImplemented
|
||||
), f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
|
||||
|
||||
def test_cmp(self):
|
||||
versions = (
|
||||
('1.5.1', '1.5.2b2', -1),
|
||||
('161', '3.10a', 1),
|
||||
('8.02', '8.02', 0),
|
||||
('3.4j', '1996.07.12', -1),
|
||||
('3.2.pl0', '3.1.1.6', 1),
|
||||
('2g6', '11g', -1),
|
||||
('0.960923', '2.2beta29', -1),
|
||||
('1.13++', '5.5.kw', -1),
|
||||
)
|
||||
|
||||
for v1, v2, wanted in versions:
|
||||
res = LooseVersion(v1)._cmp(LooseVersion(v2))
|
||||
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
||||
res = LooseVersion(v1)._cmp(v2)
|
||||
assert res == wanted, f'cmp({v1}, {v2}) should be {wanted}, got {res}'
|
||||
res = LooseVersion(v1)._cmp(object())
|
||||
assert (
|
||||
res is NotImplemented
|
||||
), f'cmp({v1}, {v2}) should be NotImplemented, got {res}'
|
||||
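# Illustrative sketch (not part of the original suite): the rich comparison
# operators are built on _cmp, so the tables above imply ordinary operator
# behaviour such as:
def test_operator_examples(self):
    assert StrictVersion('1.5.1') < StrictVersion('1.5.2b2')
    assert LooseVersion('8.02') == LooseVersion('8.02')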
@@ -0,0 +1,17 @@
|
||||
import sys
|
||||
|
||||
try:
|
||||
import grp
|
||||
import pwd
|
||||
except ImportError:
|
||||
grp = pwd = None
|
||||
|
||||
import pytest
|
||||
|
||||
UNIX_ID_SUPPORT = grp and pwd
|
||||
UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin"
|
||||
|
||||
require_unix_id = pytest.mark.skipif(
|
||||
not UNIX_ID_SUPPORT, reason="Requires grp and pwd support"
|
||||
)
|
||||
require_uid_0 = pytest.mark.skipif(not UID_0_SUPPORT, reason="Requires UID 0 support")
|
||||
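# Illustrative usage sketch (not part of the original module): the markers
# are meant to decorate tests that need real uid/gid handling, as in
# test_sdist above, e.g.
#
#   @require_unix_id
#   @require_uid_0
#   def test_something_running_as_root():
#       ...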